def backstage_list(request):
    form_operation = request.method
    operation_info = getattr(request, form_operation, None)
    model_id = None
    backs_id = None
    if operation_info:
        error_msg = ''
        model_id = operation_info.get('Modelid', '')
        backs_id = operation_info.get('Servid', '')  # several id values received in string form
        is_remote = int(operation_info.get('is_remote', 0))  # flag: fetch data from another remote backend
        db_list = None
        if is_remote:
            backsData = get_backstage_data(bid=backs_id)
            back_url = backsData['url'] + 'sync/backstage/'
            model_id = get_old_model_id(backsData, model_id)
            try:
                response = requests.get(back_url, {
                    "Modelid": model_id,
                    "Servid": backs_id
                }, timeout=60).content
                return HttpResponse(response)
            except Exception:
                error_msg = "Request failed." + trace_msg()
                print(trace_msg())
        else:
            if model_id:
                model = get_model_class(model_id)
                db_list = model.objects.using('read').all().order_by('-id')
            else:
                error_msg = "Request failed."
        return render(request, 'sync_model/sync_backstage_push.html', {
            'db_list': db_list,
            'error': error_msg
        })
    serv_list = get_backstage_data()
    model_list = {}
    for model_class in apps.get_models():
        key = '%s.%s' % (model_class.__module__, model_class.__name__)
        model_list[key] = model_class._meta.db_table
    results = {
        'model_id': model_id,
        'backs_id': backs_id,
        'serv_list': serv_list,
        'model_list': model_list,
    }
    return render(request, "sync_model/sync_backstage_list.html", results)

def update_server_statistic(self):
    """Update the per-server statistics."""
    err_servers = []
    err_statistic = []
    err_msgs = []
    for server_model in self.servers:
        try:
            self.server_count = 0
            # Query the read replica.
            query_conn = server_model.mysql_conn(from_read_db=True)
            for statisti_model in self.server_statistic_objs:
                try:
                    statistic_analysis = StatisticBuilder(
                        self, statisti_model, server_model)
                    statistic_excute = StatisticExcute(
                        query_conn, statistic_analysis, self)
                    statistic_excute.start()
                except Exception:
                    err_msg = trace_msg()
                    err_msg = 'Server %s(%s) statistic (%s) error: %s' % (
                        server_model.name, server_model.id,
                        statisti_model.name, err_msg)
                    _logger.warning(err_msg)
                    err_msgs.append(err_msg)
                    err_servers.append(server_model.id)
                    err_statistic.append(statisti_model.id)
            query_conn.close()
        except Exception:
            for statisti_model in self.server_statistic_objs:
                err_statistic.append(statisti_model.id)
            err_servers.append(server_model.id)
            err_msg = trace_msg()
            err_msg = 'Server %s(%s) error: %s' % (server_model.name,
                                                   server_model.id, err_msg)
            err_msgs.append(err_msg)
            _logger.warning(err_msg)
            self.error_server_ids.append(server_model.id)
    for ss in self.server_statistic_objs:
        ssa = StatisticBuilder(self, ss)
        # Drop stale MySQL connections before touching the ORM.
        close_old_connections()
        new_ss = Statistic.objects.get(id=ss.id)
        new_ss.last_exec_time = datetime.datetime.now()
        new_ss.result_data = '[%s - %s]:(%s)' % (
            ssa.sdate_str, ssa.edate_str, self.error_server_ids)
        new_ss.save()
    return err_msgs, err_servers, err_statistic

def update_center_statistic(self):
    """Update the central-backend statistics."""
    err_statistic = []
    err_msgs = []
    self.server_count = 0
    for statisti_model in self.center_statistic_objs:
        try:
            statistic_analysis = StatisticBuilder(self, statisti_model, None)
            query_conn = get_center_conn()
            statistic_excute = StatisticExcute(query_conn,
                                               statistic_analysis, self)
            statistic_excute.start()
        except Exception:
            err_statistic.append(statisti_model.id)
            err_msg = trace_msg()
            err_msg = 'Central statistic (%s) error: %s' % (
                statisti_model.name, err_msg)
            _logger.warning(err_msg)
            err_msgs.append(err_msg)
    for ss in self.center_statistic_objs:
        # Drop stale MySQL connections before touching the ORM.
        close_old_connections()
        ssa = StatisticBuilder(self, ss)
        new_ss = Statistic.objects.get(id=ss.id)
        new_ss.last_exec_time = datetime.datetime.now()
        new_ss.result_data = '[%s - %s]' % (ssa.sdate_str, ssa.edate_str)
        new_ss.save()
    return err_msgs, err_statistic

def hex2ids(cls, hex_str):
    """Decode a BitMap hex string back into a set of ids."""
    id_list = set()
    if not hex_str:
        return id_list
    try:
        bm = BitMap.fromhexstring(hex_str)
        id_list.update(bm.nonzero())
    except Exception:
        logging.error(trace_msg())
    return id_list

def start(self):
    if self.query_sql:
        try:
            self.del_old_result()
            has_data = self.insert_new_result()
            self.center_conn.commit()
        except Exception:
            # Roll back the partial delete/insert before reporting.
            self.center_conn.rollback()
            err_msg = trace_msg()
            err_msg = '%s:%s | %s' % (self.statistic_analysis.statistic.name,
                                      self.query_sql, err_msg)
            _logger.warning(err_msg)
            raise

def field_config(self):
    """Field definitions."""
    try:
        self.__cache_config = self.__cache_config or OrderedDict(
            sorted(json.loads(self._field_config).items(),
                   key=lambda x: x[1]['order_num']))
    except Exception:
        print(trace_msg())
        self.__cache_config = self._DEFAULT_FIELD_CONFIG
    return self.__cache_config

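# Illustrative sketch (not part of the original module) of the ordering rule
# used by field_config above: the stored JSON maps field name -> config dict,
# and display order comes from each config's 'order_num'. With hypothetical
# data:
#
#     raw = '{"b": {"order_num": 2}, "a": {"order_num": 1}}'
#     cfg = OrderedDict(sorted(json.loads(raw).items(),
#                              key=lambda x: x[1]['order_num']))
#     assert list(cfg) == ['a', 'b']
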
def authenticate(self):
    try:
        is_auth = self._authenticate_user_dn()
        from myadmin.models.user import User
        if is_auth:
            attr_map = self.get_user_attr()
            user, _ = User.objects.get_or_create(username=self.username)
            for k, v in attr_map.items():
                setattr(user, k, v.decode())
            setattr(user, 'ldap_attr_map', attr_map)
            return user
    except Exception as e:
        print(trace_msg())
        logging.error(e.args[0])
    return None

def test_query(request):
    '''Test a query.'''
    statistic_id = int(request.REQUEST.get('statistic_id', '') or 0)
    try:
        if statistic_id:
            statistic_model = Statistic.objects.get(id=statistic_id)
            query_model = Query()
            query_model.sql = statistic_model.sql
            query_model.name = statistic_model.name
            query_model.select = ('time,game_alias,sdk_code,log_server,'
                                  'log_channel,log_channel2,log_tag,log_now,'
                                  'log_previous,log_result,log_data')
            query_model.log_def = statistic_model.log_def
            return query_view(request, query_model=query_model)
    except Exception:
        err_msg = trace_msg()
        return HttpResponse(err_msg)

def ids2hex(cls, id_list):
    """Encode a collection of ids into a BitMap hex string."""
    hex_str = ''
    if not id_list:
        return hex_str
    try:
        n_l = [int(x) for x in id_list]
        maxnum = max(n_l) + 1
        bm = BitMap(maxnum)
        for i in n_l:
            bm.set(i)
        hex_str = bm.tohexstring()
    except Exception:
        logging.error(trace_msg())
    return hex_str

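# A minimal round-trip sketch for the two BitMap helpers above (hypothetical
# usage; the owner class is not shown in this module). ids2hex/hex2ids are
# inverses for collections of non-negative ints, so for some owner class C:
#
#     hex_str = C.ids2hex([1, 5, 42])
#     assert C.hex2ids(hex_str) == {1, 5, 42}
#
# Empty input short-circuits: C.ids2hex([]) == '' and C.hex2ids('') == set().
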
def get_storage_file_list_status(self, backend, type_name, path):
    path_obj = furl(path)
    path = path_obj.pathstr
    origin = furl(self.last_execution_savepoint).origin
    data_list = []
    try:
        for dir_name, dir_status in backend.list(path, status=True):
            data: FlinkStorageFileSerializer = ObjectDict()
            data.dir_path = origin + os.path.join(path, dir_name)
            data.storage_type = type_name
            data.owner = dir_status.get('owner', '')
            data.modification_time = timestamp_to_datetime_str(
                dir_status.get('modificationTime', 1000) / 1000)
            data_list.append(data)
    except Exception:
        logging.error(trace_msg())
    return data_list

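# timestamp_to_datetime_str is imported elsewhere; a minimal equivalent (an
# assumption, not the project's actual implementation) would be:
#
#     def timestamp_to_datetime_str(ts):
#         return datetime.datetime.fromtimestamp(ts).strftime(
#             '%Y-%m-%d %H:%M:%S')
#
# The division by 1000 above converts the HDFS-style millisecond
# 'modificationTime' into the seconds such a helper expects.
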
def statistic_save(request, statistic_id=0):
    '''Save a statistic.'''
    statistic_id = int(statistic_id)
    err_msg = ''
    if 0 == statistic_id:
        statistic_id = int(request.GET.get('statistic_id', 0))
    if statistic_id > 0:
        model = Statistic.objects.get(id=statistic_id)
    else:
        model = Statistic()
    try:
        model.log_type = int(request.POST.get('log_type', '0'))
        model.field_name = request.POST.get('field_name', '')
        model.remove_field = request.POST.get('remove_field', '')
        model.name = request.POST.get('name', '')
        model.set_attr('name', request.POST.get('name', ''), null=False)
        model.where = request.POST.get('where', '')
        model.count_type = int(request.POST.get('count_type', '0'))
        model.exec_interval = int(request.POST.get('exec_interval', '0'))
        model.is_save_center = int(request.POST.get('is_save_center', '0'))
        model.save_table_name = request.POST.get('save_table_name', '')
        model.last_exec_time = request.POST.get('last_exec_time')
        model.sql = request.POST.get('sql', '')
        model.is_auto_execute = int(request.POST.get('is_auto_execute', '0'))
        model.auto_exec_interval = int(
            request.POST.get('auto_exec_interval', '0'))
        model.remark = request.POST.get('remark', '')
        if model.last_exec_time == '':
            err_msg = "Please enter a start time"
        if not err_msg:
            model.save(using='write')
    except Exception:
        err_msg = trace_msg()
    return render(request, 'feedback.html', locals())

def update_main_task_status(self):
    from ..models.flink_job import FlinkJob, TaskTypes
    ret = {'status': FlinkJob.Status.Error}
    if (self.flink_job_id and self.flink_job_url
            and self.flink_job_model.status != self.flink_job_model.Status.Abort):
        try:
            base_furl = furl(self.flink_job_url)
            base_furl.fragment = ''
            base_furl.path.add('jobs').add(self.flink_job_id)
            rsp = requests.get(base_furl.url, timeout=3)
            # Flink job status REST API:
            # https://ci.apache.org/projects/flink/flink-docs-release-1.12/zh/ops/rest_api.html
            ret = rsp.json()
            ret['status'] = ret['state']
            if ret['status'] != FlinkJob.Status.Finished:
                self.status_info[TaskTypes.MainTask].update(ret)
        except json.decoder.JSONDecodeError:
            pass
        except KeyError:
            pass
        except Exception:
            logging.error('%s get main task status error %s' %
                          (self.flink_job_model.name, trace_msg()))
    return self.status_info

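# A hedged sketch of the REST call made above: GET /jobs/<jobid> against the
# Flink JobManager returns JSON whose 'state' field holds the job status
# (e.g. RUNNING, FINISHED, FAILED, CANCELED; see the rest_api doc linked in
# the code). With a hypothetical host:
#
#     base = furl('http://jobmanager:8081')
#     base.path.add('jobs').add('a1b2c3d4')  # -> http://jobmanager:8081/jobs/a1b2c3d4
#     state = requests.get(base.url, timeout=3).json()['state']
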
async def receive_json(self, event: EventDataSer, **kwargs):
    self.req_id += 1
    rsp_event_data = EventDataSer()
    try:
        req_event_data = EventDataSer(event)
        if req_event_data.o.action == ModelEventActions.SUBSCRIBE:
            req_event_data = ModelEventDataSer(event)
            for model_name in req_event_data.o.model.split(','):
                if model_name and self.allow_groups_map.get(model_name, None):
                    await self.join_to_group(req_event_data.o.model)
        rsp_event_data.o.update(req_event_data.initial_data)
    except ValidationError as e:
        rsp_event_data.o.code = 1
        rsp_event_data.o.data = e.detail
        rsp_event_data.o.msg = e.default_code
    except Exception as e:
        logger.error(trace_msg())
        rsp_event_data.o.code = 1
        rsp_event_data.o.msg = str(e.args[0])
    rsp_event_data.o.req_id = rsp_event_data.o.req_id or self.req_id
    await self.send_json(rsp_event_data)

def statistic_execute(request, statistic_id=0, server_id=0):
    '''Execution endpoint.'''
    _r = {"code": -1, "msg": ""}
    server_id = int(request.REQUEST.get('server_id', '') or 0)
    statistic_id = int(request.REQUEST.get('statistic_id', '') or 0)
    sdate = request.REQUEST.get('sdate', '')
    edate = request.REQUEST.get('edate', '')
    try:
        assert sdate and edate and statistic_id, 'Date range or statistic ID is empty!'
        sdate = convert_to_datetime(sdate)
        edate = convert_to_datetime(edate)
        server_ids = [server_id]
        statistic_ids = [statistic_id]
        sm = StatisticManager(sdate, edate, statistic_ids, server_ids)
        err_msgs = sm.start_update()[0]
        if not err_msgs:
            _r["code"] = 0
        _r["msg"] = '\n'.join(err_msgs)
    except Exception:
        _r["msg"] = trace_msg()
    return JsonResponse(_r)

def query_do(request, query_compiler: QueryCompiler, built_in=False,
             list_data_handle=None):
    """Run a query."""
    template_name = request.REQUEST.get('template', '') or 'json'
    now = getattr(request, '_start_time', time.time())
    _g = request.REQUEST.get
    _gl = request.REQUEST.getlist
    err_msg = ''
    server_id = int(request.REQUEST.get('server_id', '0'))
    total_record = total_page = 0
    page_num = int(_g('page_num', '') or 0) or 1  # page number
    page_size = int(_g('page_size', '') or 0) or 50  # rows per page
    session_id = request.REQUEST.get('session_id', '')
    is_ajax = request.is_ajax() or _g('ajax', False)
    list_data = []
    tfoot_data = []
    query_sql = count_sql = ''
    try:
        conn = query_compiler.get_conn(server_id)
        # Bind the input parameters.
        query_compiler.set_mark_parmas(request)
        # When the query is not paginated, skip the COUNT query and fake a
        # single large page instead.
        if not query_compiler.query.is_paging:
            total_record = 10000
            page_size = max(page_size, total_record)
            page_num = 1
        # Apply the LIMIT clause.
        query_compiler.set_limit(page_size, page_num)
        # Substitute variables and generate the SQL.
        query_compiler.query_sql_handle()
        fields = query_compiler.query.selects
        # Apply ordering.
        sort_type = _g('sort_type', '')
        sort_fields_index = int(_g('sort_index', '') or -1)
        if sort_fields_index >= 0 and len(fields) > sort_fields_index:
            sort_field_name = fields[sort_fields_index]
            sort_key = query_compiler.query.field_config.get(
                sort_field_name, {}).get('name', '')
            if sort_key and sort_type:
                query_compiler.set_order(sort_key, sort_type)
        count_sql = query_compiler.get_count_sql()
        query_sql = query_compiler.get_query_sql()
        # The SQL is not printed by default.
        if settings.DEBUG or request.REQUEST.get('_sql', ''):
            print(query_sql)
        cache_time = int(query_compiler.query.cache_validate)
        count_sql_key = md5('%s_%s' % (count_sql, server_id))
        cursor = conn.cursor()
        if not total_record:
            total_record, result_cache_time = cache_func(
                count_sql_key, get_query_result_cout, (conn, count_sql),
                timeout=cache_time)
        if total_record:
            query_sql_key = md5('%s_%s' % (query_sql, server_id))
            total_page = total_record // page_size
            if total_record % page_size != 0:
                total_page += 1
            list_data, result_cache_time = cache_func(
                query_sql_key, get_query_result, (conn, query_sql),
                timeout=cache_time)
            list_data, result_cache_time = cache_func(
                'display_%s' % query_sql_key, query_display_process,
                (query_compiler, list_data, page_num, page_size),
                timeout=cache_time)
            tfoot_sql = query_compiler.get_tfoot_sql()
            if not query_compiler.query.is_paging:
                total_record = min(total_record, len(list_data))
            if tfoot_sql:
                tfoot_sql_key = md5('%s_%s' % (tfoot_sql, server_id))
                tfoot_data, result_cache_time = cache_func(
                    'tfoot_%s' % tfoot_sql_key, get_query_result,
                    (conn, tfoot_sql), timeout=cache_time)
    except Exception:
        err_msg = trace_msg()
        err_msg = '%s\nthe_sql:%s' % (err_msg, query_sql)
        logging.error(err_msg)
    fields = query_compiler.query.selects
    if list_data_handle and callable(list_data_handle):
        list_data = list_data_handle(list_data)
    if built_in:
        # Export path: return the raw rows.
        return (total_record, list_data)
    exec_time = '%.3f' % (time.time() - now)
    response = render(request, 'analysis/return/%s.html' % template_name,
                      locals())
    response['Order-Record-Count'] = total_record
    response['Page-Num'] = page_num
    response['Page-Size'] = page_size
    response['Total-Page'] = total_page
    return response
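
# The page count in query_do is plain ceiling division; an equivalent
# one-liner (illustrative only) avoids the explicit remainder check:
#
#     total_page = -(-total_record // page_size)  # == math.ceil(total_record / page_size)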