async def init_bg_info(msg):
    """Handle a work-report grouping sync message and persist it."""
    log = get_logger()
    log.info('报工分组数据同步')
    log.info(msg)
    # Write the grouping payload through to the database.
    sync_group_info(msg)
def sync_materiel_group(msg):
    """Rebuild the 'materiel_group_info' context entry from an MDM payload.

    Each group contributes its code/name plus the codes and names of its
    member materiels.
    """
    groups = []
    if msg[0]['children']:
        # Descend to the instance list that actually holds the groups.
        msg = msg[0]['children'][0].setdefault('instance_list', [])
    for group in msg:
        member_codes = []
        member_names = []
        if group['children']:
            for item in group['children'][0].setdefault('instance_list', []):
                member_codes.append(item['code'])
                member_names.append(item['name'])
        groups.append({
            'code': group['code'],
            'name': group['name'],
            'materiel_list': member_codes,
            'materiel_names': member_names,
        })
    update_context('materiel_group_info', groups)
    log = get_logger()
    log.info('materiel_group_info sync')
    log.info(groups)
async def get_process(self, product_line, process_code):
    """Switch the active plan for *process_code* on *product_line* and push
    the refreshed counts/current plan to every connected websocket.
    """
    try:
        now = get_now_timestamp()
        await change_plan(product_line, process_code, now)
        all_data = get_context("all_data")
        all_data["current_plan"] = await get_current_plan_by_group(product_line)
        all_data["data_collection"] = await get_positive_and_nagetive_count(
            product_line, now)
        payload = {
            "data_collection": all_data["data_collection"],
            "current_plan": all_data["current_plan"],
        }
        # Best-effort broadcast: one dead socket must not stop the rest.
        for handler in get_context("websocket_list"):
            try:
                await send_mes(handler, mes_type="data_collection", data=payload)
            except Exception as e:
                get_logger().error("send_mes error:%s", e)
        self.send_response_data(MesCode.success, {}, 'success get data')
    except Exception as e:
        self.send_response_data(MesCode.fail, {}, str(e))
async def get_process(self, product_line):
    """
    url: GET /api/plan_management/manage/(?P<product_line>.*)/dispatched_plan/list

    Return every plan already dispatched to *product_line* for the current
    day, e.g.::

        [{"plan_no": "A181120203-00", "sequence": 1,
          "material_name": "...", "material_code": "...",
          "plan_count": 10000, "qualified_count": 1000,
          "unqualified_count": 2}]
    """
    current_date = str(datetime.now().date())
    try:
        plan_list = await get_current_day_all_plans(product_line)
        self.send_response_data(MesCode.success, plan_list, '')
        get_logger().info('成功获取:{}日所有已下发计划数据:{}'.format(
            current_date, plan_list))
    except Exception as e:
        log_exception(e, '获取{}计划数据失败'.format(current_date))
        self.send_response_data(MesCode.fail, None,
                                '获取{}计划数据失败'.format(current_date))
async def change_plan(product_line, process_code, timestamp):
    """Advance *process_code* (and the processes downstream of it) to the
    next plan in sequence.

    Returns a dict mapping every affected process code to the new plan code.
    Re-raises any error after logging it; the inner sequence-change step is
    best-effort only.
    """
    try:
        logger = get_logger()
        process_sequence_dict = get_process_sequence_dict(product_line)
        logger.debug("process_sequence_dict~~~~~~~~~~~~~~~~~~~~~:%s",
                     process_sequence_dict)
        current_plan_dict = await get_current_plan()
        current_plan = current_plan_dict.get(process_code, None)
        logger.info("current_plan~~~~~~~~~~~~~~~~~~~~~:%s", current_plan)
        plan_code = get_next_plan(current_plan)
        try:
            change_sequence_list = get_change_sequence_list(product_line)
            # NOTE(review): key=lambda x: float(x) over entries that are
            # indexed as dicts below (["process_code"]) would raise
            # TypeError, which this except silently swallows — confirm the
            # intended sort key (perhaps a "sequence" field).
            change_sequence_list = sorted([i for i in change_sequence_list],
                                          key=lambda x: float(x))
            first_process_code = change_sequence_list[0]["process_code"]
            if first_process_code:
                # Persist the plan switch upstream before recording it locally.
                await post_update_plan(plan_code, current_plan)
        except Exception as e:
            logger.error("change_sequence_list error:%s", e)
        logger.debug("plan_code~~~~~~~~~~~~~~~~~~~~~:%s", plan_code)
        process_sequence = process_sequence_dict[process_code]
        logger.debug("process_sequence~~~~~~~~~~~~~~~~~~~~~:%s",
                     process_sequence)
        # All processes at or after this sequence position get the new plan.
        process_change_list = get_process_code_list_by_change(
            product_line, process_sequence, process_sequence_dict)
        logger.debug("process_change_list~~~~~~~~~~~~~~~~~~~~~:%s",
                     process_change_list)
        await insert_plan(process_change_list, product_line, plan_code,
                          timestamp)
        return {process_code: plan_code for process_code in process_change_list}
    except Exception as e:
        traceback.print_exc()
        get_logger().error("change_plan error :%s", e)
        raise e
async def get_plan_list(self, query):
    """Fetch history-plan documents matching *query* (newest finished first)
    and send them back to the client, including any configured custom fields.
    """
    projection = {
        "task_no": 1,
        "material_name": 1,
        "plan_count": 1,
        "plan_start_date": 1,
        "workshop_name": 1,
        "product_line_code": 1,
        "operator": 1,
        "create_time": 1,
        "modified_time": 1,
        "plan_status": 1,
        "plan_type": 1,
        "real_end_date": 1,
        "_id": 0,
    }
    # Project-configurable extra columns are included as well.
    for field in get_custom_field_config().keys():
        projection[field] = 1
    collection = get_plan_db_collection()
    documents = []
    try:
        cursor = collection.find(query, projection).sort("real_end_date", -1)
        async for doc in cursor:
            documents.append(doc)
        self.send_response_data(MesCode.success, documents, '获取历史任务成功')
        get_logger().info('获取历史任务成功, 数据: {}'.format(documents))
    except Exception as e:
        log_exception(e, '获取历史任务失败')
        self.send_response_data(MesCode.fail, None, '获取历史任务失败')
async def get_process(self, product_line, plan_start_date):
    """
    url: GET /api/plan_management/manage/(?P<product_line>.*)/(?P<plan_start_date>.*)/dispatched_plan/list

    :param product_line: product-line code
    :param plan_start_date: plan start date
    :return: every plan on that line starting that day whose status is
        dispatched (1), in progress (2) or finished (4), ordered by
        plan_seq_no; each item carries task_no, material_name, plan_status.
    """
    plan_list = []
    try:
        wanted_statuses = (PlanStatusType.dispatched.value,
                           PlanStatusType.in_progress.value,
                           PlanStatusType.finished.value)
        query = {
            '$and': [
                {'$or': [{'plan_status': s} for s in wanted_statuses]},
                {
                    'product_line_code': product_line,
                    'plan_start_date': plan_start_date,
                },
            ]
        }
        cursor = get_plan_db_collection().find(query).sort(
            'plan_seq_no', ASCENDING)
        async for document in cursor:
            plan_list.append({
                'task_no': document['task_no'],
                'material_name': document['material_name'],
                'plan_status': document['plan_status'],
            })
        self.send_response_data(MesCode.success, plan_list, '')
        get_logger().info('获取产线:{}, 计划开工日期:{}其他已下发过的计划成功: {}'.format(
            product_line, plan_start_date, plan_list))
    except Exception as e:
        log_exception(
            e, '获取产线:{}, 计划开工日期:{}其他已下发过的计划失败'.format(product_line,
                                                  plan_start_date))
        self.send_response_data(
            MesCode.fail, None,
            '获取产线:{}, 计划开工日期:{}其他已下发过的计划失败'.format(product_line,
                                                 plan_start_date))
async def get_process(self, start_date, end_date):
    """
    url: GET /api/plan_management/manage/plan/history/start_date/end_date

    List history plans whose status is finished (4) or can't-dispatch (5).
    With empty dates all such plans are returned; otherwise the range is
    validated (not in the future, start <= end) and real_end_date must fall
    inside [start_date, end_date].
    """
    # Base filter shared by both branches.
    status_query = {
        '$or': [{'plan_status': PlanStatusType.finished.value},
                {'plan_status': PlanStatusType.cant_dispatch.value}]
    }
    if start_date == "" and end_date == "":
        # No range supplied: return every finished / undispatchable plan.
        await self.get_plan_list(status_query)
        return
    # ISO-formatted date strings compare correctly as plain strings.
    today = date.today().strftime('%Y-%m-%d')
    if start_date > today or end_date > today:
        get_logger().info('起始或截止时间超过今日')
        self.send_response_data(MesCode.fail, None, '起始或截止时间不得超过今日')
    elif start_date > end_date:
        get_logger().info('起始时间大于截止时间')
        self.send_response_data(MesCode.fail, None, '起始时间不得大于截止时间')
    else:
        status_query["real_end_date"] = {'$gte': start_date, '$lte': end_date}
        await self.get_plan_list(status_query)
async def clear_plan_range(product_line):
    """Delete every plan_range record belonging to *product_line*.

    Re-raises any database error after logging it.
    """
    try:
        mongo_handler = get_handler(TaskKey.mongodb)
        plan_collection = mongo_handler.xldq.plan_range
        plan_collection.delete_many({"product_line": product_line})
    except Exception as e:
        # Fixed copy-paste defect: the log message previously claimed the
        # error came from get_plan_code_list.
        get_logger().error("clear_plan_range error:%s", e)
        raise e
async def init_fz_info(msg):
    """Handle an attendance-grouping sync message: cache it in the context
    and persist it to the database.
    """
    log = get_logger()
    log.info('考勤分组数据同步')
    log.info(msg)
    update_context('mdm_group_info', msg)
    # Mirror the grouping data into the database.
    sync_group_info(msg)
async def post_process(self, product_line):
    """Accept a posted plan payload, log it, and acknowledge receipt."""
    try:
        plan_list = json.loads(self.request.body)
        get_logger().info(
            "receive plan data~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~:%s", plan_list)
        self.send_response_data(MesCode.success, {}, 'success get data')
    except Exception as e:
        self.send_response_data(MesCode.fail, {}, str(e))
async def send_reset_flag(product_line):
    """Notify the local monitor service that *product_line* should reset.

    Always returns True; HTTP errors propagate from the client.
    """
    # NOTE: "recevie" matches the monitor service's actual route spelling —
    # do not correct it unilaterally.
    url = urllib.parse.quote(
        "http://127.0.0.1:8989/api/monitor/recevie/%s" % product_line,
        safe=string.printable)
    payload = {
        "msg_type": "reset",
    }
    client = HttpClient()
    res = await client.post(url, payload)
    get_logger().info(
        "send_reset_flag res~~~~~~~~~~~~~~~~~~~~~~~~~~:%s", res)
    return True
async def get_process(self, product_line_code):
    """Return the per-group process lists for a product line.

    For every group configured on the line, emit its processes ordered by
    their 'sequence' number, each tagged with type (1 = manual '人工',
    0 = otherwise) and whether it is a report point.

    NOTE(review): the failure/response messages mention 人员信息 (personnel
    info) although this handler returns process info — looks like a
    copy-paste; confirm before changing user-facing text.
    """
    try:
        process_info_list = []
        group_config = get_group_config()
        report_point_config = get_report_point_config()
        # Both config maps must know this line, otherwise reply with [].
        if product_line_code in group_config and product_line_code in report_point_config:
            all_group_data_dict = group_config[product_line_code]
            report_point_list = report_point_config[product_line_code]
            if all_group_data_dict:
                for group_code, group_data in all_group_data_dict.items():
                    if group_data and 'process' in group_data:
                        process_data_list = []
                        original_process_data_list = group_data['process']
                        if original_process_data_list:
                            for origin_process_data in original_process_data_list:
                                process_code, process_name, process_seq = origin_process_data[
                                    'code'], origin_process_data[
                                        'name'], origin_process_data[
                                            'sequence']
                                process_classify = origin_process_data[
                                    'classify']
                                # '人工' (manual) processes are type 1, all others 0.
                                if process_classify == '人工':
                                    process_type = 1
                                else:
                                    process_type = 0
                                is_report_point = 1 if process_code in report_point_list else 0
                                process_data = {
                                    'name': process_name,
                                    'code': process_code,
                                    'seq_number': process_seq,
                                    'type': process_type,
                                    'is_report_point': is_report_point
                                }
                                process_data_list.append(process_data)
                        if process_data_list:
                            # Sort by sequence number, then drop the helper field
                            # from the response payload.
                            process_data_list = sorted(
                                process_data_list,
                                key=lambda k: int(k['seq_number']))
                            for process_data in process_data_list:
                                del process_data['seq_number']
                        process_info = {
                            'group_code': group_code,
                            'process_list': process_data_list
                        }
                        process_info_list.append(process_info)
        get_logger().info('获取产线{}工序信息成功: {}'.format(
            product_line_code, process_info_list))
        self.send_response_data(MesCode.success, process_info_list, '')
    except Exception as e:
        log_exception(e, '获取产线:{}所有人员信息失败'.format(product_line_code))
        self.send_response_data(
            MesCode.fail, None,
            '获取产线:{}所有人员信息失败'.format(product_line_code))
def sync_timely_wage_info(msg):
    """Build a {person_code: wage_type} map from the MDM payload and store
    it in the context under 'timely_wage_info'.
    """
    categories = msg[0]['children']
    wage_map = {}
    for category in categories:
        wage_type = category['directory_code']
        if category['children']:
            members = category['children'][0].setdefault('instance_list', [])
            for person in members:
                wage_map[person['code']] = wage_type
    update_context('timely_wage_info', wage_map)
    get_logger().info(wage_map)
def on_message(conn, userdata, message):
    """MQTT message callback: decode the JSON payload and run post_data to
    completion on a dedicated event loop.

    Errors are logged, never propagated (this runs on the MQTT client thread).
    """
    try:
        # Bug fix: json.loads() dropped its ``encoding`` keyword in Python 3.9
        # (it raised TypeError); bytes payloads are UTF-8-decoded automatically.
        payload = json.loads(message.payload)
        get_logger().info("received message~~~~~~:%s", payload)
        loop = asyncio.new_event_loop()
        try:
            loop.run_until_complete(post_data(payload))
        finally:
            # Close the loop even if post_data raises, so loops don't leak.
            loop.close()
    except Exception as e:
        get_logger().error("received message error:%s", e)
async def get_working_hour_info(person_info=None):
    """Fetch working-hour data from the attendance module.

    :param person_info: optional filter payload posted to the module.
    :return: the response's ``data`` attribute.
    :raises Exception: '考勤信息获取失败' on any failure.
    """
    client = None
    try:
        store = get_store()
        report_module_url = store.data['data']['api']['working_hour']
        client = HttpClient(AsyncHTTPClient(max_clients=1000))
        report_data = await client.post(report_module_url, data=person_info)
        return report_data.data
    except Exception as e:
        get_logger().exception(e)
        raise Exception('考勤信息获取失败')
    finally:
        # Bug fix: the client was leaked when post() raised; always close it.
        if client is not None:
            client.close()
async def init_process_info(msg):
    """Sync process (工序) master data into the 'process' collection.

    Each instance is upserted by its unique ``code``.
    NOTE(review): unlike the product-line sync in this file, stale process
    documents are never removed here — confirm whether that is intentional.
    """
    get_logger().info('工序信息同步')
    get_logger().info(msg)
    col = get_target_mongo_collection('process')
    # Removed the unused ``all_process_info_code`` accumulator: it was built
    # but never read (no pruning step follows).
    for p in msg.setdefault('instance_list', []):
        col.update({'code': p['code']}, {'$set': p}, upsert=True)
async def get_attendance_data(product_line):
    """Fetch on-work attendance data for *product_line*.

    :return: the attendance payload, or {} on any failure.
    """
    # Removed the no-op ``global all_data`` declaration (nothing was assigned).
    client = None
    try:
        target_url = get_store().get_onwork_attendance_url()
        client = HttpClient(AsyncHTTPClient(max_clients=1000))
        attendance_data = await client.get(target_url.format(product_line),
                                           data={})
        get_logger().debug("get_attendance_data~~~~~~~~~~~~~~~~~~~~:%s",
                           attendance_data.data)
        return attendance_data.data
    except Exception as e:
        get_logger().error("get_attendance_data error:%s", e)
        return {}
    finally:
        # Bug fix: the client was leaked when get() raised; always close it.
        if client is not None:
            client.close()
def hxd_handler(msg):
    """Populate the module-level change_process_dict from an MDM message:
    {product_line_code: [process_code, ...]}.
    """
    get_logger().debug('hxd_handler msg~~~~~~~~~~~~~~~~~~~:%s ', msg)
    global change_process_dict
    for line in msg[0]["children"][0]["instance_list"]:
        line_code = line["code"]
        change_process_dict.setdefault(line_code, [])
        # Collect every process code declared under this line.
        for process in line["children"][0]["instance_list"]:
            change_process_dict[line_code].append(process["code"])
    get_logger().info('change_process_dict ~~~~~~~~~~~~~~~~~~~:%s ',
                      change_process_dict)
async def send_change_flag(product_line, process_code_list, status):
    """Push a status-change notification for the given processes to the
    local monitor service. Always returns True; HTTP errors propagate.
    """
    # NOTE: "recevie" matches the monitor service's actual route spelling —
    # do not correct it unilaterally.
    url = urllib.parse.quote(
        "http://127.0.0.1:8989/api/monitor/recevie/%s" % product_line,
        safe=string.printable)
    payload = {
        "msg_type": "status",
        "status": status,
        "process_code_list": process_code_list,
    }
    client = HttpClient()
    res = await client.post(url, payload)
    get_logger().info(
        "send_change_flag res~~~~~~~~~~~~~~~~~~~~~~~~~~:%s", res)
    return True
async def get_plan_code_list(process_code):
    """Return the distinct plan codes recorded for *process_code* in the
    plan_range collection. Re-raises any database error after logging it.
    """
    try:
        collection = get_handler(TaskKey.mongodb).xldq.plan_range
        cursor = collection.find({"process_code": process_code},
                                 {'plan_code': True})
        return cursor.distinct("plan_code")
    except Exception as e:
        get_logger().error("get_plan_code_list error:%s", e)
        raise e
async def insert_plan(process_code_list, product_line, plan_code, timestamp):
    """Record that *plan_code* became active at *timestamp* for each process
    in *process_code_list* on *product_line*.

    Re-raises any database error after logging it.
    """
    try:
        # Bug fix: pymongo's insert_many([]) raises InvalidOperation on an
        # empty document list — nothing to record, so return early.
        if not process_code_list:
            return
        mongo_handler = get_handler(TaskKey.mongodb)
        plan_collection = mongo_handler.xldq.plan_range
        plan_collection.insert_many([{
            "product_line": product_line,
            "plan_code": plan_code,
            "process_code": process_code,
            "timestamp": timestamp
        } for process_code in process_code_list])
    except Exception as e:
        get_logger().error("insert_plan error:%s", e)
        raise e
async def get_report_info():
    """Fetch the current month's work-report data from the report module.

    TODO: the report module will start returning per-day counts; the
    downstream calculation endpoints must be adjusted accordingly.

    :return: the response's ``data`` attribute.
    :raises Exception: '报工信息获取失败' on any failure.
    """
    client = None
    try:
        store = get_store()
        report_module_url = store.data['data']['api']['report']
        client = HttpClient(AsyncHTTPClient(max_clients=1000))
        report_data = await client.get(report_module_url, data={})
        return report_data.data
    except Exception as e:
        get_logger().exception(e)
        raise Exception('报工信息获取失败')
    finally:
        # Bug fix: the client was leaked when get() raised; always close it.
        if client is not None:
            client.close()
async def get_process(self, product_line):
    """Return the fully aggregated dataset for *product_line*, logging how
    long the computation took.
    """
    try:
        started = time.time()
        all_data = get_context("all_data")
        # The "is_computed" cache short-circuit is intentionally disabled;
        # the data is always recomputed:
        # if all_data.get("is_computed"): num_data = all_data
        num_data = await get_init_all_data(product_line)
        get_logger().info("all data time:%s", time.time() - started)
        self.send_response_data(MesCode.success, num_data, 'success get data')
    except Exception as e:
        self.send_response_data(MesCode.fail, {}, str(e))
async def idi_msg_process(self, msg):
    """Dispatch an incoming IDI message on its ``type`` field.

    idi_init messages are currently ignored; idi_mdm_tag_info messages are
    forwarded to idi_init_msg_process. Errors are logged, not raised.
    """
    logger = get_logger()
    try:
        msg = json.loads(msg)
        logger.info("msg~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~:%s", msg)
        msg_type = msg['type']
        if msg_type == IdiMesType.idi_init:
            # Init handling is disabled for now:
            # await idi_init_msg_process(msg)
            pass
        elif msg_type == IdiMesType.idi_mdm_tag_info:
            await idi_init_msg_process(msg)
        else:
            logger.warning('Invalid message type')
    except Exception as e:
        get_logger().exception(e)
def process_timely_wage_info(self):
    """Persist a person's hourly-wage type and price from the request body.

    A non-numeric price silently falls back to 0 (deliberate best-effort).
    """
    request_body = loads(self.request.body)
    person_code = request_body['person_code']
    wage_code = request_body['wage_code']
    price = request_body['price']
    try:
        price = float(price)
    except (TypeError, ValueError):
        # Bug fix: narrowed the bare ``except:`` — it also swallowed
        # KeyboardInterrupt/SystemExit; only conversion failures default to 0.
        price = 0
    # Write the wage record keyed by person code.
    col = get_target_mongo_collection('timely_wage')
    col.update({'person': person_code},
               {'$set': {'wage_code': wage_code, 'price': price}})
    get_logger().info('update timely wage for {}'.format(person_code))
async def init_product_line_info(msg):
    """Sync product-line master data: upsert every line by code, then prune
    any line absent from the payload.
    """
    get_logger().info('产线信息同步')
    get_logger().debug(msg)
    col = get_target_mongo_collection('product_line')
    seen_codes = []
    for line in msg.setdefault('instance_list', []):
        seen_codes.append(line['code'])
        col.update({'code': line['code']}, {'$set': line}, upsert=True)
    # Drop product lines that no longer exist upstream.
    col.remove({'code': {'$nin': seen_codes}})
async def get_plan_time_range(timestamp):
    """Build, per process, the active time window(s) of each plan from the
    plan_range collection.

    :return: ({process_code: {plan_code: [[start, end], ...]}},
              first_start_time) where first_start_time is the earliest
              recorded switch (or the start of *timestamp*'s day if none
              is earlier).
    """
    logger = get_logger()
    mongo_handler = get_handler(TaskKey.mongodb)
    plan_collection = mongo_handler.xldq.plan_range
    # Oldest switches first, so consecutive records per process delimit windows.
    find_res = plan_collection.find({}).sort("timestamp", 1)
    find_res = [{
        "plan_code": i["plan_code"],
        "timestamp": i["timestamp"],
        "process_code": i["process_code"]
    } for i in find_res]
    # groupby needs its input pre-sorted by the grouping key; Python's sort
    # is stable, so within a process the timestamp order is preserved.
    find_res = sorted(find_res, key=lambda x: x["process_code"])
    logger.debug("plan_list~~~~~~~~~~~~~~~~~~~~~~~~~:%s", find_res)
    from itertools import groupby
    res = {}
    first_start_time = get_day_start_time(timestamp)
    for process_code, group in groupby(find_res, lambda p: p['process_code']):
        res.setdefault(process_code, {})
        plan_list = list(group)
        for n, item in enumerate(plan_list):
            plan_code = item["plan_code"]
            process_code = item["process_code"]
            res[process_code].setdefault(plan_code, [])
            start_time = item["timestamp"]
            # Track the earliest switch seen anywhere.
            if start_time < first_start_time:
                first_start_time = start_time
            # A window ends where the next switch begins; the last window
            # is open-ended up to "now".
            if n < len(plan_list) - 1:
                end_time = plan_list[n + 1]["timestamp"]
            else:
                end_time = get_now_timestamp()
            res[process_code][plan_code].append([start_time, end_time])
    return res, first_start_time
def get_next_plan(current_plan):
    """Return the plan immediately after *current_plan* in sequence order.

    With no current plan the first plan is returned; if the current plan is
    the last one an Exception("no next plan") is raised.

    NOTE(review): when *current_plan* is truthy but absent from the plan
    list, the function falls through and implicitly returns None — looks
    unintended; confirm whether callers handle a None plan code.
    """
    plan_list = get_context("plan_list")
    # Plan ids ordered by their "sequence" field.
    plan_code_list = [
        i["planId"] for i in sorted(plan_list, key=lambda x: x["sequence"])
    ]
    get_logger().debug("get_next_plan ~~~~~~~~~~~~~~~~~~ plan_list:%s",
                       plan_code_list)
    length = len(plan_code_list)
    if not current_plan:
        return plan_code_list[0]
    elif current_plan in plan_code_list:
        index = plan_code_list.index(current_plan)
        if index < length - 1:
            return plan_code_list[index + 1]
        else:
            raise Exception("no next plan")
async def get_idi_data(tag_code_list, start_time, end_time,
                       org="organization_xldq"):
    """Query the IDI service for the given tags over [start_time, end_time].

    datetime bounds are serialised with the module's date_format; returns
    the per-org payload, or {} when the service returns nothing or the
    request fails.
    """
    logger = get_logger()
    logger.debug(
        "get_idi_data start,tag_code_list info:%s,start_time:%s,end_time:%s",
        tag_code_list, start_time, end_time)
    try:
        if isinstance(start_time, datetime):
            # Service expects string timestamps.
            start_time = start_time.strftime(date_format)
            end_time = end_time.strftime(date_format)
        url = get_store().get_idi_url()
        request_items = [{
            "code": code,
            "start_time": start_time,
            "end_time": end_time,
        } for code in tag_code_list]
        logger.debug("idi request url:%s", url)
        logger.debug("idi request data:%s", request_items)
        res = await get_data(url, {org: request_items})
        logger.debug("idi res data:%s", res)
        if res:
            return res[org]
        return {}
    except Exception as e:
        traceback.print_exc()
        logger.error("get_idi_data error:%s", e)
        return {}