def ajax_create_interfere_task():
    """Create an interfere (stimulation) task.

    Reads the task parameters from the query string and stores a task
    document in ES under its pinyin-transliterated id.

    Returns a JSON list: ["1"] on successful creation, ["0"] when a task
    with the same pinyin id already exists.
    """
    finish = ["0"]
    task_name = request.args.get('task_name', '')
    # BUG FIX: the four assignments below were commented out although the
    # names are used further down, which raised NameError on every call.
    # Restored to mirror ajax_create_prediction_task.
    pinyin_task_name = pinyin.get(task_name.encode('utf-8'),
                                  format='strip', delimiter="_")
    submit_user = request.args.get('submit_user', '*****@*****.**')
    current_ts = int(time.time())
    submit_time = request.args.get('submit_time', current_ts)
    start_time = request.args.get('start_time', "")
    stop_time = request.args.get('stop_time', "")
    remark = request.args.get('remark', '')
    must_keywords = request.args.get('must_keywords', '')   # "&&&"-joined
    should_keywords = request.args.get('should_keywords', '')
    sti_during = request.args.get("interfer_during", 3600)

    task_detail = dict()
    task_detail["stimulation_during"] = sti_during
    task_detail["task_name"] = task_name
    task_detail['pinyin_task_name'] = pinyin_task_name
    task_detail["submit_user"] = submit_user
    task_detail["stop_time"] = int(stop_time)
    task_detail["start_time"] = int(start_time)
    task_detail["update_time"] = int(submit_time)
    task_detail["remark"] = remark
    task_detail["must_keywords"] = must_keywords
    task_detail["should_keywords"] = should_keywords
    task_detail["submit_time"] = int(submit_time)
    task_detail["finish"] = "0"
    # Align the initial text-scan timestamp to the hour of submission.
    task_detail["scan_text_time"] = datehour2ts(ts2datehour(float(submit_time)))
    task_detail["scan_text_processing"] = "0"  # whether weibo text is being copied
    task_detail["interfere_processing_status"] = "0"
    task_detail["stimulation_processing_status"] = "0"
    task_detail["interfere_finish"] = "0"
    task_detail["stimulation_finish"] = "0"
    task_detail["scan_text_finish"] = "0"

    # Only create the document if no task with this id already exists.
    exist_task = es_prediction.exists(index=index_manage_interfere_task,
                                      doc_type=type_manage_interfere_task,
                                      id=pinyin_task_name)
    if not exist_task:
        es_prediction.index(index=index_manage_interfere_task,
                            doc_type=type_manage_interfere_task,
                            id=pinyin_task_name, body=task_detail)
        finish = ["1"]

    # finish: "0" task already exists, "1" submitted successfully
    return json.dumps(finish)
def ajax_create_prediction_task():
    """Create a prediction task.

    Reads the task parameters from the query string and stores a task
    document in ES under its pinyin-transliterated id.

    Returns a JSON list: ["1"] on successful creation, ["0"] when a task
    with the same pinyin id already exists.
    """
    finish = ["0"]
    task_name = request.args.get('task_name', '')
    pinyin_task_name = pinyin.get(task_name.encode('utf-8'),
                                  format='strip', delimiter="_")
    submit_user = request.args.get('submit_user', '*****@*****.**')
    current_ts = int(time.time())
    submit_time = request.args.get('submit_time', current_ts)
    start_time = request.args.get('start_time', "")
    stop_time = request.args.get('stop_time', "")
    remark = request.args.get('remark', '')
    #macro_during = request.args.get('macro_during', 3600)
    micro_during = request.args.get("interfer_during", 3600)
    must_keywords = request.args.get('must_keywords', '')   # "&&&"-joined
    should_keywords = request.args.get('should_keywords', '')
    # NOTE: debug prints of stop_time removed — they leaked request data
    # to stdout on every call.

    task_detail = dict()
    task_detail["task_name"] = task_name
    task_detail['pinyin_task_name'] = pinyin_task_name
    task_detail["submit_user"] = submit_user
    task_detail["stop_time"] = int(stop_time)
    task_detail["start_time"] = int(start_time)
    task_detail["remark"] = remark
    task_detail["must_keywords"] = must_keywords
    task_detail["should_keywords"] = should_keywords
    task_detail["submit_time"] = int(submit_time)
    #task_detail["macro_during"] = macro_during
    task_detail["micro_during"] = micro_during
    task_detail["finish"] = "0"  # micro prediction finish flag
    # Timestamp of the previous text copy: submission hour minus one
    # micro-prediction interval.
    task_detail["scan_text_time"] = (datehour2ts(ts2datehour(float(submit_time)))
                                     - int(micro_during))
    task_detail["scan_text_processing"] = "0"  # whether weibo text is being copied
    task_detail["macro_value_finish"] = '0'
    task_detail["macro_trendline_finish"] = '0'

    # Only create the document if no task with this id already exists.
    exist_task = es_prediction.exists(index=index_manage_prediction_task,
                                      doc_type=type_manage_prediction_task,
                                      id=pinyin_task_name)
    if not exist_task:
        es_prediction.index(index=index_manage_prediction_task,
                            doc_type=type_manage_prediction_task,
                            id=pinyin_task_name, body=task_detail)
        finish = ["1"]

    return json.dumps(finish)
def ajax_submit_event_task():
    """Submit an event-analysis task.

    Builds a task document from the query string and indexes it into ES
    (overwriting any existing document with the same pinyin id).

    Returns a JSON list: ["1"] if the document was indexed, ["0"] if
    indexing raised an error.
    """
    task_name = request.args.get('task_name', '')
    pinyin_task_name = pinyin.get(task_name.encode('utf-8'),
                                  format='strip', delimiter='_')
    submit_user = request.args.get('submit_user', '*****@*****.**')
    current_ts = int(time.time())
    submit_time = request.args.get('submit_time', current_ts)
    start_time = request.args.get('start_time', '')
    stop_time = request.args.get('stop_time', '')
    must_keywords = request.args.get('must_keywords', '')
    should_keywords = request.args.get('should_keywords', '')

    task_detail = dict()
    task_detail["task_name"] = task_name
    task_detail['pinyin_task_name'] = pinyin_task_name
    task_detail["submit_user"] = submit_user
    task_detail["start_time"] = long(start_time)
    task_detail["stop_time"] = long(stop_time)
    task_detail["submit_time"] = int(submit_time)
    task_detail["must_keywords"] = must_keywords
    task_detail["should_keywords"] = should_keywords
    task_detail["finish"] = "0"
    task_detail["scan_text_finish"] = "0"  # whether weibo text is being copied
    task_detail["event_value_finish"] = "0"  # event-analysis status
    # event_value_finish: 0 not computed and not queued, 1 queued but not
    #   started, 2 computing, 3 done
    # finish: -1 weibo_counts/uid_counts, -2 time, -3 geo, -4 network,
    #   -5 sentiment

    try:
        es.index(index=index_manage_event_analysis,
                 doc_type=type_manage_event_analysis,
                 id=pinyin_task_name, body=task_detail)
        result = ["1"]  # submit success
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt. Narrowed to Exception; the
        # best-effort "return 0 on failure" contract is preserved.
        result = ["0"]  # submit failure
    return json.dumps(result)
def ajax_delete_social_sensors():
    """Remove the given uids from the stored social-sensor list.

    The "delete_users" query argument is a '&'-joined string of uids.
    Returns a JSON list: ["1"] when the sensor list was updated, ["0"]
    when no uids were supplied.
    """
    status = ["0"]
    raw_uids = request.args.get("delete_users", '')  # '&'-joined uid string
    doc = es_prediction.get(index="manage_sensing_task", doc_type="task",
                            id="social_sensing_task")["_source"]
    current_sensors = json.loads(doc["social_sensors"])
    if raw_uids:
        to_remove = set(raw_uids.split("&"))
        remaining = set(current_sensors) - to_remove
        doc["social_sensors"] = json.dumps(list(remaining))
        es_prediction.index(index="manage_sensing_task", doc_type="task",
                            id="social_sensing_task", body=doc)
        status = ["1"]
    return json.dumps(status)
def ajax_add_social_sensor():
    """Add the given uids to the stored social-sensor list.

    The "add_users" query argument is a ','-joined string of uids.
    Returns a JSON list: ["1"] when at least one new uid was added,
    ["0"] when no input was given or every uid was already a sensor.
    """
    finish = ["0"]
    add_user = request.args.get("add_users", '')  # ','-joined uid string
    task_detail = es_prediction.get(index="manage_sensing_task",
                                    doc_type="task",
                                    id="social_sensing_task")["_source"]
    sensors = json.loads(task_detail["social_sensors"])
    if add_user:
        # CLEANUP: removed the redundant `if uid_list:` guard (split()
        # always yields a non-empty list here) and the unused
        # in_set/results locals left over from a commented-out return.
        uid_list = add_user.split(",")
        out_set = set(uid_list) - set(sensors)  # uids not yet sensors
        if out_set:
            new_list = list(set(uid_list) | set(sensors))
            task_detail["social_sensors"] = json.dumps(new_list)
            es_prediction.index(index="manage_sensing_task", doc_type="task",
                                id="social_sensing_task", body=task_detail)
            finish = ["1"]
    return json.dumps(finish)