def exesqlsc(sql, retry=10):
    """Run *sql* against the Sensors Analytics query API and return the raw
    response text.

    On failure the SQL plus traceback is logged and the call retries up to
    *retry* times (1s apart); when retries are exhausted the traceback text
    itself is returned in place of a result.
    """
    query_params = {
        "project": "production",  # Sensors project name
        "token": "your_token_here",  # project token, visible under the SA native admin account
        "q": sql,
        "format": "json",  # response payload format
    }
    endpoint = "https://your_sensors_domain:8107/api/sql/query"  # SA site domain
    try:
        response = requests.post(url=endpoint, params=query_params)
        payload = response.text
    except Exception:
        error = traceback.format_exc()
        write_to_log(filename='sensorsSQL', defname='exesqlsc', result=sql + error)
        if retry <= 0:
            return error
        time.sleep(1)
        return exesqlsc(sql=sql, retry=retry - 1)
    return payload
def post_wechat_notification(data):
    """Send a WeChat template message described by *data*.

    Returns 'success' when the API reports errcode 0, the raw response
    string on a non-zero errcode, or 'check_fail_log' when the request
    itself failed (traceback written to the wechat log).
    """
    payload = {
        "touser": data['wechat_openid'],
        "template_id": data['wechat_template_id'],
        "data": data['data'],
    }
    if 'target_url' in data:
        payload['url'] = data['target_url']
    if "miniprogram" in data:
        # NOTE(review): presence is keyed on "miniprogram" but values come from
        # miniprogram_id / miniprogram_pagepath — confirm callers set all three.
        payload['miniprogram'] = {
            "appid": data['miniprogram_id'],
            "pagepath": data['miniprogram_pagepath'],
        }
    try:
        api_url = ('https://api.weixin.qq.com/cgi-bin/message/template/send?access_token='
                   + getWeChatToken())
        result = get_json_from_postjson(url=api_url, data=payload)
        return 'success' if result['errcode'] == 0 else str(result)
    except Exception:
        write_to_log(filename='wechat', defname='post_wechat_notification',
                     result=traceback.format_exc())
        return 'check_fail_log'
def last_1500_email(args):
    """ETL job: collect the most recently active tvcbook users who have an
    email on file and load them into user group 4301.

    args keys: 'date' (required, end day), 'days' (look-back window, default 1),
    'count' (max users, default 50), 'job_id' (scheduler job id).
    Returns (status, list_index) from insert_usergroup, or (20, 0) on error.
    """
    date = args['date']
    days = args['days'] if 'days' in args and args['days'] else 1
    count = args['count'] if 'count' in args and args['count'] else 50
    timenow = int(time.time())
    stauts = 0  # NOTE(review): typo of 'status'; never read afterwards
    index_id = 0  # fallback list index reported when the group build fails early
    job_id = args['job_id']
    try:
        start_day = get_display_day(start_day=date, calc=1 - days)
        # Latest pageview per distinct_id within the window, newest first,
        # restricted to users that have a wechat original_id.
        sql_find_user = """select tvcbook.distinct_id,max(tvcbook.created_at) as max_c from tvcbook left join tvcbook_user on tvcbook.distinct_id=tvcbook_user.distinct_id where tvcbook.date>=DATE_SUB('{start_day}',INTERVAL 1 DAY) and date<=DATE_SUB('{end_day}',INTERVAL 1 DAY) and event = '$pageview' and tvcbook_user.original_id is not null and tvcbook_user.original_id != '' GROUP BY tvcbook.distinct_id order by max_c desc limit {count}""".format(start_day=start_day, end_day=date, count=count)
        users_result = do_tidb_select(sql=sql_find_user)
        # NOTE(review): do_tidb_select may return ('sql_err', 0) after retries;
        # users_result[0] would then be the string 'sql_err' — confirm intended.
        # print(users_count)
        total_insert = 0
        all_data = {'data_list': []}
        for user in users_result[0]:
            # Look up the email/nickname in the business DB for each user id.
            sql_email = """select email,nickname from yii2_user left join yii2_user_profile on yii2_user.id = yii2_user_profile.user_id where uid = '{uid}';""".format(uid=user[0])
            sql_email_result, sql_email_count = do_tvcbook_select(sql=sql_email)
            # Keep the user only when an email exists and the cap isn't reached.
            if sql_email_count > 0 and sql_email_result[0][0] is not None and sql_email_result[0][0] != '' and total_insert < count:
                all_data['data_list'].append({'key': user[0], 'enable': 9, 'json': {'email': sql_email_result[0][0], 'nickname': sql_email_result[0][1], 'last_active_time': get_time_str(inttime=user[1])}})
                total_insert = total_insert + 1
                # print(total_insert,user[0],sql_email_result[0][0],sql_email_result[0][1],get_time_str(inttime=user[1]))
        # print(all_data)
        status, index_id = insert_usergroup(project='tvcbook', group_id=4301, data=all_data, init_time=timenow, list_desc=date, jobs_id=job_id)
        return status, index_id
    except Exception:
        error = traceback.format_exc()
        write_to_log(filename='tvcbook', defname='last_1500_email', result=error)
        return 20, index_id
def disable_single():  # disable a single push notification
    """HTTP endpoint: disable one notification row.

    Requires POST plus admin password, project and noti_id; otherwise the
    view falls through and returns None (preserved original contract).
    Returns JSON with the affected-row count and elapsed seconds.
    """
    start_time = time.time()
    password = get_url_params('password')
    project = get_url_params('project')
    noti_id = get_url_params('noti_id')
    if password == admin.admin_password and project and request.method == 'POST' and noti_id:
        try:
            result = disable_noti_db(project=project, noti_id=noti_id)
            time_cost = round(time.time() - start_time, 2)
            # result[1] is the number of rows the update actually touched
            if result[1] > 0:
                returnjson = {'result': 'success', 'results_count': result[1], 'timecost': time_cost}
            else:
                returnjson = {'result': 'fail', 'results_count': result[1], 'timecost': time_cost}
            return jsonify(returnjson)
        except Exception:
            error = traceback.format_exc()
            # fixed: the log entry previously carried defname='manual_send',
            # misattributing failures to the wrong endpoint
            write_to_log(filename='api_noti', defname='disable_single', result=error)
            returnjson = {'result': 'fail', 'error': error}
            return jsonify(returnjson)
def send_auto_noti(): miss = 1 #初始化没有命中的次数 while True: missTime = 0 #延迟器,用来降低数据库压力,每次找不到,则增加1秒的重试等待时间。当重试等待超过2分钟后,不再增加重试等待时间。以保证2分钟至少会查询一次。 projects_result, project_count = select_scheduler_enable_project() write_to_log(filename='messenger', defname='send_auto_noti', result='获取启用定时器任务的项目' + (str(project_count) if project_count else '0')) for project in projects_result: result = select_noti_auto(project=project[0]) if result[1] > 0: miss = 1 for noti in result[0]: send_manual(project=project[0], noti=noti) else: miss = miss * 2 missTime = missTime + 1 print(project[0] + '暂无自动消息,miss:' + str(miss)) if missTime == project_count and miss >= 0 and miss <= 120: time.sleep(abs(miss)) elif missTime == project_count and miss > 120: time.sleep(120) elif missTime < project_count: miss = 1
def insert_usergroup(project, group_id, data, list_desc, jobs_id, init_time=None):  # rebuild this user group
    """Create the next list revision of a user group and bulk-insert its rows.

    Parameters:
        project   -- project table prefix
        group_id  -- user-group (plan) id
        data      -- {'data_list': [{'key', 'enable', 'json'?}, ...]}
        list_desc -- human-readable description stored on the list row
        jobs_id   -- scheduler job that triggered this build
        init_time -- list init timestamp; defaults to now. (Fixed: the old
                     default `int(time.time())` was evaluated once at import
                     time, so every defaulted call reused process start time.)

    Returns (status, list_index): 5 on success, 4 when rows were partially
    written, 6 when the list row itself could not be created.
    """
    if init_time is None:
        init_time = int(time.time())
    index_id = check_lastest_usergroup_list_index(project=project, group_id=group_id)
    try:
        # status 2: list row created, load in progress
        list_init_count = insert_usergroup_list(project=project, group_id=group_id, group_index=index_id + 1, status=2, list_desc=list_desc, jobs_id=jobs_id)
        list_id = check_list_id(project=project, group_id=group_id, group_list_index=index_id + 1)
        data_index = 0
        for item in data['data_list']:
            data_index = data_index + 1
            if 'json' in item:
                # denormalize group id and key into the stored JSON payload
                item['json']['group_id'] = group_id
                item['json']['key'] = item['key']
            insert_usergroup_data(project=project, group_list_id=list_id[0][0], data_index=data_index, key=item['key'], json=json.dumps(item['json']), enable=item['enable'])
        # status 3: data loaded; status 5: build complete
        insert_usergroup_list(project=project, group_id=group_id, group_index=index_id + 1, list_init_date=init_time, status=3, complete_at=0, apply_temple_times=0, item_add=1, created_at=None, updated_at=None, jobs_id=jobs_id)
        insert_usergroup_list(project=project, group_id=group_id, group_index=index_id + 1, status=5, complete_at=int(time.time()), jobs_id=jobs_id)
        update_usergroup_plan(project=project, plan_id=group_id, latest_data_list_index=index_id + 1, updated_at=int(time.time()), repeat_times_add=1, latest_data_time=int(time.time()))
        return 5, index_id + 1
    except Exception:
        error = traceback.format_exc()
        list_info = check_list_id(project=project, group_id=group_id, group_list_index=index_id + 1)
        if list_info and list_info[0][1] > 0:
            # the list row exists: mark it partially failed (status 4)
            insert_usergroup_list(project=project, group_id=group_id, group_index=index_id + 1, status=4, complete_at=int(time.time()), jobs_id=jobs_id)
            write_to_log(filename='etl_model', defname='insert_usergroup', result=error)
            return 4, index_id + 1
        else:
            # the list row never materialized: mark init failure (status 6)
            insert_usergroup_list(project=project, group_id=group_id, group_index=index_id + 1, status=6, complete_at=int(time.time()), jobs_id=jobs_id)
            write_to_log(filename='etl_model', defname='insert_usergroup', result=error)
            return 6, 0
def create_auto_group():
    """Scheduler pass: for each enabled project, read the auto-apply plans and
    create notification groups from their default templates.

    Each plan's func JSON may carry 'default_temple' as a single int or a list
    of template ids. Send times come from the template's meta.default_send_time
    timer; when the template has no row/args at all, 'now' is used instead.
    """
    projects_result, project_count = select_scheduler_enable_project()
    write_to_log(filename='scheduler', defname='create_auto_group', result='获取项目列表' + (str(project_count) if project_count else '0'))
    for project in projects_result:
        result_auto_noti = select_auto_temple_apply_plan(project=project[0])
        # print(result_auto_noti)
        for data_list in result_auto_noti[0]:
            # print(data_list)
            func_data = json.loads(data_list[2])  # plan's func JSON column
            # print(func_data)
            id_list = []
            # 'default_temple' may be a list of template ids or a single int
            if "default_temple" in func_data and type(func_data["default_temple"]) is list:
                for temple_id in func_data["default_temple"]:
                    id_list.append(temple_id)
            elif "default_temple" in func_data and type(func_data["default_temple"]) is int:
                id_list.append(func_data["default_temple"])
            for tid in id_list:
                send_at = []
                result_temple = select_noti_temple(project=project[0], temple_id=tid)
                if result_temple[1] > 0 and result_temple[0][0][2]:
                    args = json.loads(result_temple[0][0][2])  # template args JSON
                    if 'meta' in args and 'default_send_time' in args['meta']:
                        print(args['meta']['default_send_time'])
                        send_at_list = get_next_time(timer=args['meta']['default_send_time'], current_time=int(time.time()))
                        # org_data_time_list = get_next_time(timer = args['meta']['default_send_time'],current_time=data_list[3])
                        # if send_at_list == send_at_list :
                        for time_return in send_at_list:
                            send_at.append(time_return['time_int'])
                elif len(send_at) == 0:
                    # NOTE(review): this elif only fires when the template row is
                    # missing/empty; a template WITH args but WITHOUT
                    # default_send_time leaves send_at empty and creates nothing
                    # — confirm that is intended.
                    send_at.append(int(time.time()))
                for send_int in send_at:
                    create_noti_group(project=project[0], temple_id=tid, user_group_id=data_list[1], data_id=None, owner='create_auto_group', send_at=send_int)
def play_all(self):
    """Run the trigger's sample step; a 'sample' project runs it a second time.

    Any exception is swallowed after its traceback is written to the
    trigger log.
    """
    try:
        self.sample()
        # NOTE(review): sample() already ran once above, so the 'sample'
        # project executes it twice in total — looks intentional, confirm.
        if self.project == 'sample':
            self.sample()
    except Exception:
        write_to_log(filename='trigger', defname='play_all',
                     result=traceback.format_exc())
def show_usergroup_data():  # query user-group contents
    """HTTP endpoint: paginated listing of the data rows in one user-group list.

    POST params: password (admin), project, list_id (required), length
    (page size, default 500), page (default 1), everywhere (substring filter
    applied to data_key+data_json).
    Falls through (returns None) when the guard fails.
    """
    start_time = time.time()
    password = get_url_params('password')
    project = get_url_params('project')
    list_id = get_url_params('list_id')
    length = get_url_params('length')
    page = get_url_params('page')
    everywhere = get_url_params('everywhere')
    length = int(length) if length else 500
    page = int(page) if page else 1
    if password == admin.admin_password and project and request.method == 'POST' and list_id:  # only the correct password may trigger the action
        # SECURITY(review): `everywhere` and `project` are interpolated
        # directly into a SQL fragment — injectable by anyone holding the
        # admin password. Should be parameterized/escaped inside
        # select_usergroup_data_for_api; flagged, not silently changed.
        add_on_where = f'''and concat({project}_usergroup_data.data_key,{project}_usergroup_data.data_json) like "%{everywhere}%"''' if everywhere and everywhere != '' and everywhere != ' ' else ''
        try:
            results = select_usergroup_data_for_api(project=project, list_id=list_id, length=length, page=page, everywhere=add_on_where)
            resultscount = select_usergroup_datacount_for_api(project=project, list_id=list_id, length=length, page=page, everywhere=add_on_where)
            temp_json = []
            for item in results[0]:
                temp_json.append({
                    "group_id": item[0],
                    "list_id": item[1],
                    "data_id": item[2],
                    "data_index": item[3],
                    "data_key": item[4],
                    "data_json": json.loads(item[5]),
                    "enable_policy_id": item[6],
                    "enable_policy_name": item[7]
                })
            time_cost = round(time.time() - start_time, 2)
            total_count = resultscount[0][0][0] if resultscount[1] > 0 else 0
            returnjson = {
                'result': 'success',
                'results_count': results[1],
                'timecost': time_cost,
                'data': temp_json,
                'total_count': total_count,
                'page': page,
                'length': length
            }
            return jsonify(returnjson)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='api_noti', defname='show_usergroup_data', result=error)
            returnjson = {'result': 'fail', 'error': error}
            return jsonify(returnjson)
def show_temples():  # list notification templates
    """HTTP endpoint: return the template list for a project.

    mode == 'cli' returns a trimmed projection that skips the JSON columns
    (temple_args/temple_content) and the latest list id. Falls through
    (returns None) when the password/method guard fails.
    """
    start_time = time.time()
    password = get_url_params('password')
    project = get_url_params('project')
    mode = get_url_params('mode')
    if password == admin.admin_password and project and request.method == 'POST':  # only the correct password may trigger the action
        try:
            results = show_temples_db(project=project)
            is_cli = bool(mode) and mode == 'cli'
            temp_json = []
            for item in results[0]:
                entry = {
                    "temple_id": item[0],
                    "temple_name": item[1],
                    "temple_desc": item[2],
                    "apply_times": item[5],
                    "lastest_apply_time": item[6],
                    "lastest_apply_list_desc": item[8],
                    "lastest_apply_group_name": item[9],
                    "created_at": item[10],
                    "updated_at": item[11],
                }
                if not is_cli:
                    # full mode additionally decodes the template JSON columns
                    entry["temple_args"] = json.loads(item[3])
                    entry["temple_content"] = json.loads(item[4])
                    entry["lastest_apply_list_id"] = item[7]
                temp_json.append(entry)
            returnjson = {
                'result': 'success',
                'results_count': results[1],
                'timecost': round(time.time() - start_time, 2),
                'data': temp_json,
            }
            return jsonify(returnjson)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='api_noti', defname='show_temples', result=error)
            return jsonify({'result': 'fail', 'error': error})
def get_json_from_postjson(url, data):
    """POST *data* as a JSON body to *url* and return the decoded JSON reply.

    Implicitly returns None when the request or decode fails; the traceback
    is written to the api_req log. The User-agent header identifies this
    service (admin.who_am_i).
    """
    headers = {'User-agent': admin.who_am_i}
    try:
        response = requests.post(url=url, headers=headers, json=data)
        return response.json()
    except Exception:
        write_to_log(filename='api_req', defname='get_json_from_postjson',
                     result=traceback.format_exc())
def recall_baidu_bdvid(uid, project, newType=99, convertValue=0, token="your_token_here"):
    """Report a conversion back to Baidu OCPC for the given user.

    Finds all device ids mapped to *uid*, locates the newest $pageview whose
    URL carries a bd_vid parameter, uploads that URL as a conversion
    (type *newType*, optional *convertValue*), and records the round trip as a
    '$is_channel_callback_event' event row.

    SECURITY(review): uid is interpolated straight into SQL — confirm callers
    sanitize it; parameterizing belongs in select_tidb.
    """
    try:
        timenow = int(time.time())
        # All device ids ever associated with this uid (original_id falls back
        # to distinct_id when empty).
        sql_check_all_did = """select if(original_id='',distinct_id,original_id)as did from {project}_user where distinct_id='{uid}' GROUP BY did""".format(uid=uid, project=project)
        all_did, did_count = select_tidb(sql=sql_check_all_did)
        did_list = []
        for did in all_did:
            did_list.append("'" + did[0] + "'")
        did_str = (',').join(did_list)
        # Newest pageview for any of those ids whose URL contains bd_vid;
        # the bdvid value is cut out of the URL with SUBSTRING_INDEX.
        sql_find_last_bdvid = """select date,created_at,distinct_id,SUBSTRING_INDEX(SUBSTRING_INDEX(JSON_EXTRACT(all_json, '$."properties"."$url"'),'bd_vid=',-1),'"',1)as bdvid,JSON_EXTRACT(all_json, '$."properties"."$url"'),all_json from {project} where distinct_id in ({dids}) and `event`='$pageview' and JSON_EXTRACT(all_json, '$."properties"."$url"') like '%bd_vid%' having LENGTH(bdvid)>0 ORDER BY created_at desc limit 1""".format(dids=did_str, project=project)
        print(sql_find_last_bdvid)
        bdvid_result, bdvid_count = select_tidb(sql=sql_find_last_bdvid)
        if bdvid_count > 0:
            latest_date = bdvid_result[0][0]
            latest_created_at = bdvid_result[0][1]
            latest_distinct_id = bdvid_result[0][2]
            latest_bdvid = bdvid_result[0][3]
            latest_url = json.loads(bdvid_result[0][4])  # JSON_EXTRACT returns a quoted string
            jsondata = json.loads(bdvid_result[0][5])
            # Baidu OCPC upload payload: logidUrl is the full landing URL
            # containing the bd_vid parameter.
            req_data = {"token": token, "conversionTypes": [{"logidUrl": latest_url, "newType": newType}]}
            if convertValue > 0:
                req_data['conversionTypes'][0]['convertValue'] = convertValue
            # print(req_data)
            json_result = get_json_from_postjson(url='https://ocpc.baidu.com/ocpcapi/api/uploadConvertData', data=req_data)
            # Persist the full round trip as an internal callback event.
            all_json = {}
            all_json['req_data'] = req_data
            all_json['bdvid_result'] = jsondata
            all_json['latest_date'] = str(latest_date)
            all_json['latest_created_at'] = latest_created_at
            all_json['latest_distinct_id'] = latest_distinct_id
            all_json['latest_bdvid'] = latest_bdvid
            all_json['latest_url'] = latest_url
            all_json['recall_result'] = json_result
            all_json['all_did'] = did_list
            insert_count = insert_event(table=project, alljson=json.dumps(all_json, ensure_ascii=False), track_id=0, distinct_id=uid.replace('"', ''), lib='ghost_sa', event='$is_channel_callback_event', type_1='ghost_sa_func', User_Agent=None, Host=None, Connection=None, Pragma=None, Cache_Control=None, Accept=None, Accept_Encoding=None, Accept_Language=None, ip=None, ip_city=None, ip_asn=None, url=None, referrer=None, remark='normal', ua_platform=None, ua_browser=None, ua_version=None, ua_language=None, created_at=timenow)
        # print(all_json)
        # print(json.dumps(all_json,ensure_ascii=False))
    except Exception:
        error = traceback.format_exc()
        # NOTE(review): if the failure happens before sql_find_last_bdvid is
        # assigned (e.g. the first select_tidb call), this raises NameError
        # and the original error is lost — confirm/fix upstream.
        write_to_log(filename='sample', defname='recall_baidu_bdvid', result=sql_find_last_bdvid + error)
def create_scheduler_jobs_manual():  # manually create user-group build jobs
    """HTTP endpoint: enqueue scheduler jobs for a plan outside its timer.

    POST params: password (admin), project, plan_id, optional send_at epoch
    (defaults to now). Each matching plan gets one job per occurrence
    produced by get_next_time, queued with status 16. Falls through
    (returns None) when the guard fails.
    """
    project = get_url_params('project')
    plan_id = get_url_params('plan_id')
    send_at = get_url_params('send_at') if get_url_params('send_at') else int(time.time())
    password = get_url_params('password')
    if password == admin.admin_password and request.method == 'POST':
        try:
            count = 0  # total rows inserted across all plans/times
            plan_result, plan_count = select_usergroup_jobs_plan_manual(project=project, plan_id=plan_id)
            for plan in plan_result:
                times = get_next_time(current_time=int(send_at))
                for time_1 in times:
                    insert_result, insert_count = insert_scheduler_job(
                        project=project,
                        group_id=plan[0],
                        datetime=time_1['time_int'],
                        data={
                            'datetime_int': time_1['time_int'],
                            'datetime_tuple': time_1['time_tuple'],
                            'datetime': time.strftime("%Y-%m-%d %H:%M:%S", time_1['time_tuple']),
                            'date': time.strftime("%Y-%m-%d", time_1['time_tuple']),
                            'func': json.loads(plan[1])  # plan's func JSON column
                        },
                        priority=plan[3] if plan[3] else 13,  # 13 = lowest priority default
                        status=16)  # 16 = queued, not yet claimed
                    write_to_log(
                        filename='api_noti',
                        defname='create_scheduler_jobs_manual',
                        result='项目' + str(project) + '计划' + str(plan[0]) + '已添加时间' + time.strftime("%Y-%m-%d %H:%M:%S", time_1['time_tuple']))
                    count = count + insert_count
            returnjson = {'result': 'success', 'insert_count': count}
            return jsonify(returnjson)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='api_noti', defname='create_scheduler_jobs_manual', result=error)
            returnjson = {'result': 'fail', 'error': error}
            return jsonify(returnjson)
def do_tidb_select(sql, presql=None, args=None, retrycount=5):  # query DB
    """Run a SELECT through select_tidb with retry.

    Parameters:
        sql        -- the SELECT statement
        presql     -- optional statement executed before the query
        args       -- bound parameters forwarded to select_tidb
        retrycount -- remaining retries; each failure sleeps 1s and recurses

    Returns (results, result_count); after exhausting retries returns
    ('sql_err', 0). Every failure's traceback goes to the db_op log.
    """
    try:
        results, result_count = select_tidb(sql=sql, args=args, presql=presql)
        return results, result_count
    except Exception:
        error = traceback.format_exc()
        write_to_log(filename='db_op', defname='do_tidb_select', result=error)
        if retrycount > 0:
            retrycount -= 1
            time.sleep(1)
            # fixed: presql was silently dropped on retry, so retried calls
            # ran without their preparatory statement
            return do_tidb_select(sql=sql, presql=presql, args=args, retrycount=retrycount)
        else:
            return 'sql_err', 0
def play_all(self):
    """Dispatch this notification to its delivery medium by noti_type.

    23 -> email, 24 -> SMS, 29 -> WeChat official account; any other type
    falls through and returns None. Errors are logged and a fixed failure
    string is returned.
    """
    try:
        noti_type = self.noti_type
        print(noti_type)
        if noti_type == 23:
            return self.via_email()
        if noti_type == 24:
            return self.sms()
        if noti_type == 29:
            return self.wechat_official_account()
    except Exception:
        write_to_log(filename='messenger', defname='play_all',
                     result=traceback.format_exc())
        return 'failed,please check logs'
def manual_send():  # manually push messages
    """HTTP endpoint: push queued notifications on demand.

    POST params: password (admin), project, and either noti_group_id
    (sends the whole group) or noti_id (sends one). Only rows in the
    manual-eligible statuses are selected. Falls through (returns None)
    when the guard fails.
    """
    start_time = time.time()
    password = get_url_params('password')
    project = get_url_params('project')
    noti_group_id = get_url_params('noti_group_id')
    noti_id = get_url_params('noti_id')
    if password == admin.admin_password and project and request.method == 'POST':
        pending_noti = []
        pending_return = []
        status_list = [8, 9, 24, 28]  # which message statuses to push: 9 manual, 24 manual-apply, 28 manual-push
        try:
            # noti_id takes precedence over noti_group_id when both are given
            if noti_group_id and not noti_id:
                for status in status_list:
                    noti_list = select_noti_group(project=project, noti_group_id=int(noti_group_id), status=status)
                    for item in noti_list[0]:
                        pending_noti.append(item)
            elif noti_id:
                for status in status_list:
                    noti_list = select_noti_single(project=project, noti_id=int(noti_id), status=status)
                    for item in noti_list[0]:
                        pending_noti.append(item)
            for noti_item in pending_noti:
                pending_return.append(send_manual(project=project, noti=noti_item))
            time_cost = round(time.time() - start_time, 2)
            returnjson = {
                'result': 'success',
                'results_count': len(pending_noti),
                'timecost': time_cost,
                'data': pending_return
            }
            return jsonify(returnjson)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='api_noti', defname='manual_send', result=error)
            returnjson = {'result': 'fail', 'error': error}
            return jsonify(returnjson)
def send_email(
        to_addr='*****@*****.**',
        from_addr=None,
        subject='鬼策测试邮件标题',
        html="""<p>鬼策测试邮件正文</p><p><a href="https://github.com/white-shiro-bai/ghost_sa/">这是一个测试链接</a></p>"""
):  # send an HTML-formatted email from Python
    """Send an HTML mail over SMTP-SSL using the credentials from the email
    config module.

    Returns 'success', or the stringified SMTP error (also written to the
    email log) when delivery fails.
    """
    message = MIMEText(html, 'html', 'utf-8')
    # Header values: sender nickname/account, recipient nickname/account, subject
    for field, value in (('From', from_addr), ('To', to_addr), ('Subject', subject)):
        message[field] = Header(value, 'utf-8')
    try:
        server = smtplib.SMTP_SSL(host=email.mail_host, port=email.mail_port)
        server.login(email.mail_user, email.mail_pass)
        server.sendmail(from_addr, to_addr, message.as_string())
        return 'success'
    except smtplib.SMTPException as exc:
        write_to_log(filename='email', defname='send_email', result=str(exc))
        return str(exc)
def create_manual_temple_noti():  # externally triggered template message
    """HTTP endpoint: apply a template to an ad-hoc list of users and queue
    the resulting notifications (no user group involved).

    POST params: password (admin), project, temple_id, owner (required,
    non-empty), data (JSON array of {'distinct_id', 'data_json'}), optional
    send_at epoch (defaults to now). Falls through (returns None) when the
    guard fails.
    """
    project = get_url_params('project')
    temple_id = get_url_params('temple_id')
    send_at = get_url_params('send_at') if get_url_params('send_at') else int(time.time())
    password = get_url_params('password')
    owner = get_url_params('owner')
    data = get_url_params('data')
    if password == admin.admin_password and request.method == 'POST' and owner and owner != '' and project:
        try:
            data_jsons = json.loads(data)
            data_list = []
            for item in data_jsons:
                print(item)
                if 'distinct_id' in item and 'data_json' in item:
                    # NOTE(review): the template row is re-fetched for every
                    # item although temple_id never changes inside the loop.
                    result_temple = select_noti_temple(project=project, temple_id=temple_id)
                    result = apply_temple(
                        project=project,
                        temple_args=json.loads(result_temple[0][0][2]),
                        temple_content=json.loads(result_temple[0][0][3]),
                        data_json=item['data_json'],
                        data_key=item['distinct_id'],
                        send_at=send_at,
                        group_id=None,
                        owner=owner)
                    data_list.append(result)
            result_insert = create_non_usergroup_noti(
                args={
                    'owner': owner,
                    'temple_id': temple_id,
                    'project': project,
                    'data': data_list
                })
            return jsonify(result_insert)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='api_noti', defname='create_manual_temple_noti', result=error)
            returnjson = {'result': 'fail', 'error': error}
            return jsonify(returnjson)
def get_suoim_short_url(long_url):
    """Shorten *long_url* through the suo.im API.

    Returns (short_url, 'ok') on success, or ('', 'fail') when the API
    reports an error (the error text is written to the shorturl log).
    """
    req = requests.get(url=suoim_shorten_url,
                       params={
                           'key': suoim_key,
                           'format': 'json',
                           'url': long_url
                       })
    result = req.json()
    # print(type(result['err']))
    if result['err'] is None or result['err'] == '':
        return result['url'], 'ok'
    else:
        # fixed: this log call used to sit AFTER the return statement and was
        # unreachable — API errors were never logged
        write_to_log(filename='shorturl', defname='get_suoim_short_url', result=result['err'])
        return '', 'fail'
def get_task_day():
    """Scheduler pass: expand every enabled plan's timer into concrete queued
    jobs for the coming occurrences.

    For each scheduler-enabled project, reads its plans, resolves the next
    run times via get_next_time, and inserts one job per occurrence with
    status 16 (queued). Progress is logged per project/plan.
    """
    projects_result, project_count = select_scheduler_enable_project()
    write_to_log(filename='scheduler', defname='get_task_day', result='获取启用定时器任务的项目' + (str(project_count) if project_count else '0'))
    for project in projects_result:
        plan_result, plan_count = select_usergroup_jobs_plan(project=project[0])
        write_to_log(filename='scheduler', defname='get_task_day', result='查询到项目' + project[0] + '含有可用计划' + (str(plan_count) if plan_count else '0'))
        for plan in plan_result:
            # plan[2] is the timer spec; expand it from 'now'
            times = get_next_time(timer=plan[2], current_time=int(time.time()))
            for time_1 in times:
                func_loads = json.loads(plan[1])  # plan's func JSON column
                func_loads['args']['noti_status'] = plan[4]
                insert_scheduler_job(project=project[0], group_id=plan[0], datetime=time_1['time_int'], data={'datetime_int': time_1['time_int'], 'datetime_tuple': time_1['time_tuple'], 'datetime': time.strftime("%Y-%m-%d %H:%M:%S", time_1['time_tuple']), 'date': time.strftime("%Y-%m-%d", time_1['time_tuple']), 'func': func_loads}, priority=plan[3] if plan[3] else 13, status=16)
                write_to_log(filename='scheduler', defname='get_task_day', result='项目' + str(project[0]) + '计划' + str(plan[0]) + '已添加时间' + time.strftime("%Y-%m-%d %H:%M:%S", time_1['time_tuple']))
            write_to_log(filename='scheduler', defname='get_task_day', result='项目' + str(project[0]) + '计划' + str(plan[0]) + '已添加计划条目' + str(len(times)))
def do_tidb_exe(sql, presql=None, args=None, retrycount=5):  # write DB
    """Execute a mutating statement through exe_tidb with retry.

    Refuses UPDATE/DELETE statements without a WHERE clause. (NOTE: this is
    a crude substring check — a 'where' inside a string or comment would
    also satisfy it; kept as a best-effort safety net.)

    Returns (results, result_count, lastest_id); after exhausting retries
    returns ('sql_err', 0, 0). Failures are logged with sql+args.
    """
    lowered = sql.lower()  # hoisted: was computed up to four times
    if "update" in lowered and "where" not in lowered:
        write_to_log(filename='db_op', defname='do_tidb_exe', result=sql + str(args) + 'update必须包含where条件才能执行')
        return 'update必须包含where条件才能执行', 0, 0
    elif "delete" in lowered and "where" not in lowered:
        write_to_log(filename='db_op', defname='do_tidb_exe', result=sql + str(args) + 'delete必须包含where条件才能执行')
        return 'delete必须包含where条件才能执行', 0, 0
    else:
        try:
            results, result_count, lastest_id = exe_tidb(sql=sql, args=args, presql=presql)
            return results, result_count, lastest_id
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='db_op', defname='do_tidb_exe', result=sql + str(args) + error)
            if retrycount > 0:
                retrycount -= 1
                time.sleep(1)
                # fixed: presql was silently dropped on retry
                return do_tidb_exe(sql=sql, presql=presql, args=args, retrycount=retrycount)
            else:
                return 'sql_err', 0, 0
def disable_usergroup_data():  # disable a single user-group data row
    """HTTP endpoint: mark one user-group data row as disabled.

    Requires POST + admin password + project + data_id; otherwise the view
    falls through and returns None. Responds with the affected-row count;
    a zero count is reported as failure.
    """
    start_time = time.time()
    password = get_url_params('password')
    project = get_url_params('project')
    data_id = get_url_params('data_id')
    if password == admin.admin_password and project and request.method == 'POST' and data_id:  # only the correct password may trigger the action
        try:
            results = disable_usergroup_data_db(project=project, data_id=data_id)
            payload = {
                'results_count': results[1],
                'timecost': round(time.time() - start_time, 2),
                'data': [],
            }
            if results[1] > 0:
                payload['result'] = 'success'
                payload["disable_data_id"] = data_id
            else:
                payload['result'] = 'fail'
                payload["error"] = "没有修改任何内容"
            return jsonify(payload)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='api_noti', defname='disable_usergroup_data', result=error)
            return jsonify({'result': 'fail', 'error': error})
def create_manual_non_temple_noti():  # externally triggered non-template message
    """HTTP endpoint: accept a JSON array of ad-hoc notifications and queue
    them without applying any template.

    POST params: password (admin), project, owner (required, non-empty),
    medium_id, data (JSON array), optional send_at epoch (defaults to now).
    Items are kept when they carry a non-empty distinct_id either directly
    or inside send_tracker; each kept item is stamped with send_at.
    Falls through (returns None) when the guard fails.
    """
    project = get_url_params('project')
    send_at = get_url_params('send_at') if get_url_params('send_at') else int(time.time())
    password = get_url_params('password')
    medium_id = get_url_params('medium_id')
    owner = get_url_params('owner')
    data = get_url_params('data')
    if password == admin.admin_password and request.method == 'POST' and owner and owner != '' and project:
        try:
            data_jsons = json.loads(data)
            data_list = []
            for item in data_jsons:
                # prefer the send_tracker's distinct_id when it is present
                if 'send_tracker' in item and 'distinct_id' in item['send_tracker'] and item['send_tracker']['distinct_id'] != '':
                    item['distinct_id'] = item['send_tracker']['distinct_id']
                    item['send_at'] = send_at
                    data_list.append(item)
                elif 'distinct_id' in item and item['distinct_id'] != '':
                    item['send_at'] = send_at
                    data_list.append(item)
            result = create_non_usergroup_non_temple_noti(
                args={
                    'owner': owner,
                    'project': project,
                    'data': data_list,
                    'medium_id': medium_id
                })
            return jsonify(result)
        except Exception:
            error = traceback.format_exc()
            # fixed: the log entry previously carried the sibling endpoint's
            # name ('create_manual_temple_noti'), misattributing failures
            write_to_log(filename='api_noti', defname='create_manual_non_temple_noti', result=error)
            returnjson = {'result': 'fail', 'error': error}
            return jsonify(returnjson)
def duplicate_scheduler_jobs():  # re-run a user-group build
    """HTTP endpoint: clone the scheduler job that produced *list_id* so the
    user group is rebuilt.

    Requires POST + admin password + project + list_id; otherwise the view
    falls through and returns None. A zero insert count means the list was
    not system-created and cannot be redone.
    """
    start_time = time.time()
    password = get_url_params('password')
    project = get_url_params('project')
    list_id = get_url_params('list_id')
    if password == admin.admin_password and project and request.method == 'POST' and list_id:  # only the correct password may trigger the action
        try:
            results = duplicate_scheduler_jobs_sql(project=project, list_id=list_id)
            payload = {
                'results_count': results[1],
                'timecost': round(time.time() - start_time, 2),
                'data': [],
            }
            if results[1] > 0:
                payload['result'] = 'success'
                payload["added_id"] = results[2]
            else:
                payload['result'] = 'fail'
                payload["error"] = "该列表不是由系统创建的,不支持重做"
            return jsonify(payload)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='api_noti', defname='duplicate_scheduler_jobs', result=error)
            return jsonify({'result': 'fail', 'error': error})
def apply_temples_list():  # apply a template to a user-group list
    """HTTP endpoint: apply a notification template to a user group or to a
    single data row.

    Requires POST + admin password + project + owner; otherwise the view
    falls through and returns None. send_at defaults to now.
    """
    start_time = time.time()
    password = get_url_params('password')
    project = get_url_params('project')
    temple_id = get_url_params('temple_id')
    user_group_id = get_url_params('user_group_id')
    data_id = get_url_params('data_id')
    owner = get_url_params('owner')
    send_at = get_url_params('send_at')
    send_at = int(send_at) if send_at else int(time.time())
    if password == admin.admin_password and project and owner and request.method == 'POST':  # only the correct password may trigger the action
        try:
            # actually apply the template (when both a single data row and a
            # group list are given, the single row takes precedence)
            outcome = create_noti_group(project=project, temple_id=temple_id,
                                        user_group_id=user_group_id,
                                        data_id=data_id, owner=owner,
                                        send_at=send_at)
            return jsonify({
                'result': outcome,
                'timecost': round(time.time() - start_time, 2),
                'data': [],
            })
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='api_noti', defname='apply_temples_list', result=error)
            return jsonify({'result': 'fail', 'error': error})
def do_all_task():
    """Worker loop: claim and execute scheduler jobs by priority.

    Exits after 30 consecutive minutes without a task so the caller can
    re-scan plans. Job status transitions: 17 claimed -> 18 running ->
    19 done (or 21 failed). The job's func JSON names the module/function
    to run and its kwargs; '___'-prefixed string args are substituted from
    the job data itself.
    """
    task_count = 0
    start_time = int(time.time())
    miss = 1  # back-off counter; doubles per empty low-priority poll
    # exit when no task has been obtained for 30 consecutive minutes, so plans get re-searched
    while task_count < 1 and int(time.time()) - start_time <= 1800:
        priority = get_priority()
        task_result, task_count = check_next_scheduler_job(priority=priority)
        write_to_log(filename='scheduler', defname='do_all_task', result='查询优先级' + str(priority) + '获取任务数' + str(task_count))
        # When the LOW priority queue (13) is empty, back off to reduce DB
        # load. (Original note: no "check higher priorities before sleeping"
        # optimization was implemented.)
        if task_count == 0 and priority == 13:
            # delay grows by doubling; capped at 300s so the queue is still
            # polled at least every 5 minutes
            miss = miss * 2
            if miss >= 0 and miss <= 300:
                time.sleep(abs(miss))
            elif miss > 300:
                time.sleep(300)
            print('miss记数器', miss)
        elif task_count >= 1:
            miss = 1
            data = json.loads(task_result[0][4])  # the job's payload JSON
            data['group_id'] = task_result[0][2]
            update_scheduler_job(jobid=task_result[0][0], status=17)  # claimed
            # resolve the target callable from the func spec
            py = importlib.import_module(data['func']['dir'])
            ff = getattr(py, data['func']['name'])
            # print(data['func']['args'])
            update_scheduler_job(jobid=task_result[0][0], status=18)  # running
            # substitute '___key' string placeholders with values from the
            # job data itself
            for arg in data['func']['args']:
                if type(data['func']['args'][arg]) is str and '___' in data['func']['args'][arg]:
                    data['func']['args'][arg] = data[data['func']['args'][arg].replace('___', '')]
            # print(data['func']['args'])
            data['func']['args']['job_id'] = task_result[0][0]
            data['func']['args']['group_id'] = task_result[0][2]
            try:
                write_to_log(filename='scheduler', defname='do_all_task', result='优先级' + str(priority) + '任务id' + str(task_result[0][0]) + '拼接的任务参数' + str(data) + '开始执行')
                # the task callable returns (result, produced list index)
                func_result, list_index = ff(data['func']['args'])
                update_scheduler_job(jobid=task_result[0][0], list_index=list_index, status=19)  # done
                write_to_log(filename='scheduler', defname='do_all_task', result='优先级' + str(priority) + '任务id' + str(task_result[0][0]) + '拼接的任务参数' + str(data) + '执行完毕')
            except Exception:
                error = traceback.format_exc()
                write_to_log(filename='scheduler', defname='do_all_task', result=error)
                update_scheduler_job(jobid=task_result[0][0], status=21)  # failed
            # Decrement so the while condition holds and the loop pulls the
            # next job. NOTE(review): placement reconstructed from
            # whitespace-collapsed source — confirm it sits at this level
            # (after the try/except) and not inside the except branch.
            task_count = task_count - 1
def upload_users_from_pickle_to_sql(project='tvcbook', remark='production'):
    """Import user profiles from a pickled export into the project's user table.

    Reads data_export/<project>/<remark>/users/users_all.pkl (a pickled list
    of JSON strings), rebuilds each record as a profile_set payload, inserts
    it via insert_user, and finally deletes the pickle file. Per-item errors
    are logged and skipped.
    """
    dirpath = os.path.join('data_export', project, remark, 'users')
    filepath = os.path.join(dirpath, 'users_all.pkl')
    with open(filepath, "rb") as f2:
        # SECURITY: pickle.loads executes arbitrary code from the file — only
        # load archives this service exported itself.
        # fixed: was pickle._loads, a private CPython alias of loads
        results = pickle.loads(f2.read())
    a = 1  # progress counter for console output
    for item in results:
        print(a)
        a += 1
        # print('a',item)
        data_rebuild = {
            "properties": {},
            "lib": {},
            "distinct_id": "",
            "event": "",
            "type": "profile_set"
        }
        # print(item)
        try:
            item = json.loads(item)
            # print(item["first_id"])
            # Infer the SDK from the anonymous id's length (16 = web cookie,
            # 39-46 = mini-program openid-style, 51-64 = web fingerprint).
            first_len = len(item["first_id"])
            if first_len == 16:
                data_rebuild['lib']['$lib'] = 'js'
            elif first_len >= 39 and first_len <= 46:
                data_rebuild['lib']['$lib'] = 'MiniProgram'
            elif first_len >= 51 and first_len <= 64:
                data_rebuild['lib']['$lib'] = 'js'
            else:
                data_rebuild['lib']['$lib'] = 'unknow'  # historical value, kept as stored downstream
            if 'second_id' in item:
                # logged-in user: second_id is the real id, first_id the device id
                data_rebuild["distinct_id"] = item['second_id']
                data_rebuild["map_id"] = item['first_id']
                data_rebuild["original_id"] = item['first_id']
                _copy_profile_fields(item, data_rebuild["properties"],
                                     ('userid', 'name', 'realname', 'sex',
                                      'verification_type', 'company'))
                # print(item)
            else:
                # anonymous-only record
                data_rebuild["distinct_id"] = item['first_id']
                # NOTE(review): this branch also copies 'viptype' while the
                # second_id branch does not — confirm the asymmetry is intended.
                _copy_profile_fields(item, data_rebuild["properties"],
                                     ('userid', 'name', 'realname', 'sex',
                                      'verification_type', 'company', 'viptype'))
            print(data_rebuild)
            insert_user(project='tvcbook', data_decode=data_rebuild, created_at=0)
        except Exception:
            error = traceback.format_exc()
            write_to_log(filename='import_from_sa', defname='upload_users_from_pickle_to_sql', result=error)
    os.remove(filepath)


def _copy_profile_fields(item, props, fields):
    """Copy the listed keys from *item* into *props*; 'userid' maps to 'userId'."""
    for field in fields:
        if field in item:
            props['userId' if field == 'userid' else field] = item[field]
def do_insert(msg): try: group = json.loads( msg.value.decode('utf-8'))['group'] if "group" in json.loads( msg.value.decode('utf-8')) else None data = json.loads(msg.value.decode('utf-8'))['data'] offset = msg.offset print(offset) if group == 'event_track': # print(data['project']) insert_data(project=data['project'], data_decode=data['data_decode'], User_Agent=data['User_Agent'], Host=data['Host'], Connection=data['Connection'], Pragma=data['Pragma'], Cache_Control=data['Cache_Control'], Accept=data['Accept'], Accept_Encoding=data['Accept_Encoding'], Accept_Language=data['Accept_Language'], ip=data['ip'], ip_city=data['ip_city'], ip_asn=data['ip_asn'], url=data['url'], referrer=data['referrer'], remark=data['remark'], ua_platform=data['ua_platform'], ua_browser=data['ua_browser'], ua_version=data['ua_version'], ua_language=data['ua_language'], ip_is_good=data['ip_is_good'], ip_asn_is_good=data['ip_asn_is_good'], created_at=data['created_at'], updated_at=data['updated_at'], use_kafka=False) elif group == 'installation_track': insert_installation_track(project=data['project'], data_decode=data['data_decode'], User_Agent=data['User_Agent'], Host=data['Host'], Connection=data['Connection'], Pragma=data['Pragma'], Cache_Control=data['Cache_Control'], Accept=data['Accept'], Accept_Encoding=data['Accept_Encoding'], Accept_Language=data['Accept_Language'], ip=data['ip'], ip_city=data['ip_city'], ip_asn=data['ip_asn'], url=data['url'], referrer=data['referrer'], remark=data['remark'], ua_platform=data['ua_platform'], ua_browser=data['ua_browser'], ua_version=data['ua_version'], ua_language=data['ua_language'], ip_is_good=data['ip_is_good'], ip_asn_is_good=data['ip_asn_is_good'], created_at=data['created_at'], updated_at=data['updated_at'], use_kafka=False) elif group == 'shortcut_history': insert_shortcut_history(short_url=data['short_url'], result=data['status'], cost_time=data['time2'], ip=data['ip'], user_agent=data['user_agent'], 
accept_language=data['accept_language'], ua_platform=data['ua_platform'], ua_browser=data['ua_browser'], ua_version=data['ua_version'], ua_language=data['ua_language'], created_at=data['created_at']) elif group == 'shortcut_read': insert_shortcut_read(short_url=data['short_url'], ip=data['ip'], user_agent=data['user_agent'], accept_language=data['accept_language'], ua_platform=data['ua_platform'], ua_browser=data['ua_browser'], ua_version=data['ua_version'], ua_language=data['ua_language'], referrer=data['referrer'], created_at=data['created_at']) else: insert_data(project=data['project'], data_decode=data['data_decode'], User_Agent=data['User_Agent'], Host=data['Host'], Connection=data['Connection'], Pragma=data['Pragma'], Cache_Control=data['Cache_Control'], Accept=data['Accept'], Accept_Encoding=data['Accept_Encoding'], Accept_Language=data['Accept_Language'], ip=data['ip'], ip_city=data['ip_city'], ip_asn=data['ip_asn'], url=data['url'], referrer=data['referrer'], remark=data['remark'], ua_platform=data['ua_platform'], ua_browser=data['ua_browser'], ua_version=data['ua_version'], ua_language=data['ua_language'], ip_is_good=data['ip_is_good'], ip_asn_is_good=data['ip_asn_is_good'], created_at=data['created_at'], updated_at=data['updated_at'], use_kafka=False) except Exception: error = traceback.format_exc() write_to_log(filename='kafka_consumer', defname='do_insert', result=error)
def upload_events_from_pickle_to_sql(project='tvcbook', remark='production'): #所有的文件 filelist = [] dirpath = os.path.join('data_export', project, remark, 'events') for maindir, subdir, file_name_list in os.walk(dirpath): # print("1:",maindir) #当前主目录 # print("2:",subdir) #当前主目录下的所有目录 # print("3:",str(file_name_list)) #当前主目录下的所有文件 # file_name_list.sort() # subdir.sort() for filename in file_name_list: apath = os.path.join(maindir, filename) #合并成一个完整路径 filelist.append(apath) # print(file_name_list) filelist.sort() # print(filelist) for pkl in filelist: # print(pkl) with open(pkl, "rb") as f2: results = pickle._loads(f2.read()) # p = multiprocessing.Pool(processes = 3) for item in results: # # print(item) try: itemdict = json.loads(item) all_json = { "properties": itemdict, "distinct_id": itemdict["distinct_id"], "event": itemdict["event"], "type": "track" } # first_id = itemdict['first_id'] if 'first_id' in itemdict else None # second_id = itemdict['second_id'] if 'second_id' in itemdict else None # unionid = itemdict['unionid'] if 'unionid' in itemdict else None # id = itemdict['id'] if 'id' in itemdict else None ip_city, ip_is_good = get_addr(itemdict["$ip"]) ip_asn, ip_asn_is_good = get_asn(itemdict["$ip"]) if ip_is_good == 0: ip_city = '{}' if ip_asn_is_good == 0: ip_asn = '{}' print(all_json) created_at = time.mktime( time.strptime(itemdict["time"].split('.')[0], '%Y-%m-%d %H:%M:%S')) # all_json = json.dumps(itemdict,ensure_ascii=False) insert_data(project='tvcbook', data_decode=all_json, User_Agent=None, Host=None, Connection=None, Pragma=None, Cache_Control=None, Accept=None, Accept_Encoding=None, Accept_Language=None, ip=itemdict["$ip"] if "$ip" in itemdict else None, ip_city=ip_city, ip_asn=ip_asn, url=None, referrer=itemdict["$referrer"] if "$referrer" in itemdict else None, remark=remark, ua_platform=itemdict["$lib"] if "$lib" in itemdict else None, ua_browser=itemdict["$browser"] if "$browser" in itemdict else None, ua_version=itemdict["$browser_version"] 
if "$browser_version" in itemdict else None, ua_language=None, ip_is_good=ip_is_good, ip_asn_is_good=ip_asn_is_good, created_at=created_at) except Exception: error = traceback.format_exc() write_to_log(filename='import_from_sa', defname='upload_events_from_pickle_to_sql', result=error) # f2.close() # p.apply_async(func=insert_data,kwds={ # "project":"tvcbook", # "data_decode":all_json, # "User_Agent":None, # "Host":None, # "Connection":None, # "Pragma":None, # "Cache_Control":None, # "Accept":None, # "Accept_Encoding":None, # "Accept_Language":None, # "ip":itemdict["$ip"] if "$ip" in itemdict else None, # "ip_city":ip_city, # "ip_asn":ip_asn, # "url":None, # "referrer":itemdict["$referrer"] if "$referrer" in itemdict else None, # "remark":'production', # "ua_platform":itemdict["$lib"] if "$lib" in itemdict else None, # "ua_browser":itemdict["$browser"] if "$browser" in itemdict else None, # "ua_version":itemdict["$browser_version"] if "$browser_version" in itemdict else None, # "ua_language":None, # "ip_is_good":ip_is_good, # "ip_asn_is_good":ip_asn_is_good, # "created_at":created_at}) # # insert_data # p.close() # p.join() os.remove(pkl)
def show_usergroup_list(): #查询计划下的用户分群列表 start_time = time.time() password = get_url_params('password') project = get_url_params('project') plan_id = get_url_params('plan_id') mode = get_url_params('mode') if password == admin.admin_password and project and request.method == 'POST' and plan_id: #只有正确的密码才能触发动作 # remark = request.form.get('remark',None)k+'\'' try: results = show_project_usergroup_list(project=project, plan_id=plan_id) temp_json = [] for item in results[0]: if mode and mode == 'cli': temp_json.append({ "list_id": item[0], "group_id": item[1], "group_title": item[2], "group_list_index": item[3], "list_init_date": item[4], "list_desc": item[5], "jobs_id": item[6], "item_count": item[7], "status_name": item[9], "complete_at": item[10], "apply_temple_times": item[11], "created_at": item[12], "updated_at": item[13] }) else: temp_json.append({ "list_id": item[0], "group_id": item[1], "group_title": item[2], "group_list_index": item[3], "list_init_date": item[4], "list_desc": item[5], "jobs_id": item[6], "item_count": item[7], "status_id": item[8], "status_name": item[9], "complete_at": item[10], "apply_temple_times": item[11], "created_at": item[12], "updated_at": item[13] }) time_cost = round(time.time() - start_time, 2) returnjson = { 'result': 'success', 'results_count': results[1], 'timecost': time_cost, 'data': temp_json } # print(returnjson) return jsonify(returnjson) except Exception: error = traceback.format_exc() write_to_log(filename='api_noti', defname='show_usergroup_list', result=error) returnjson = {'result': 'fail', 'error': error} return jsonify(returnjson)