def handlePath():
    """Flask handler: compute the shortest path between two nodes, avoiding flooded ones.

    Expects a JSON body with:
      - ``firstNode``: start node id (int-like)
      - ``secondNode``: end node id (int-like)

    :return: JSON response with ``path_type``, the ``path`` (list of node ids),
        and ``path_coords`` (coordinates for each node on the path).
    """
    # BUG FIX: the request body was parsed three times (plus raw debug prints);
    # parse it exactly once and drop the leftover debug output.
    data = request.get_json(force=True)
    start_node = int(data["firstNode"])
    end_node = int(data["secondNode"])

    # Nodes flagged as flooded today are excluded from the path search.
    wet_nodes = [
        node["id"]
        for node in db.get_nodes()
        if node["is_flooded"][utils.cur_date()]
    ]

    path_type, path = db.graph().shortest_path(start_node, end_node, wet_nodes)
    return jsonify({
        "path_type": path_type,
        "path": path,
        "path_coords": [
            db.get_node("id", node_id)["coords"] for node_id in path
        ],
    })
def report_water_level(node, lvl):
    """Record a water-level reading for a node, dated today, in the database.

    :param node: node id
    :param lvl: integer representing water level at the node
    """
    today = utils.cur_date()
    DB.report_rain_level(today, node, lvl)
def get_nodes(self):
    """Refresh per-node averages and flood flags, then return every node.

    :return: all node documents (without Mongo ``_id``), sorted by ``id``.
    """
    # Bring today's average levels up to date before predicting floods.
    for current in self.sorted_nodes():
        self._update_avg_level(utils.cur_date(), current)
    self.update_is_flooded()
    cursor = self._nodes.find({}, {'_id': False}).sort("id")
    return list(cursor)
def server_app(db):
    """Long-running maintenance loop for the server.

    Daily: drops the oldest day of per-node rain/level/flood data and seeds
    empty entries for the new day. Weekly: dumps reported average water
    levels to ``depths_train.txt`` and retrains the linear and XGB models.

    :param db: database wrapper exposing ``get_nodes()``, ``linear_model``
        and ``XGB_models``.
    """
    global linear_model
    while True:
        # Each day, add today as a key to the dataset and drop the oldest day.
        for _ in range(7):
            time.sleep(86400)
            date_to_drop = utils.days_ago(utils.DAYS_TO_KEEP + 1)
            today = utils.cur_date()
            for node in db.get_nodes():
                # NOTE(review): these mutate the local dicts returned by
                # get_nodes(); nothing here writes them back to Mongo —
                # confirm persistence happens elsewhere.
                node["rain_data"].pop(date_to_drop)
                node["avg_levels"].pop(date_to_drop)
                node["is_flooded"].pop(date_to_drop)
                node["rain_data"][today] = []
                node["avg_levels"][today] = 0
                node["is_flooded"][today] = 0

        # Each week, retrain the models using new data.
        # In production, use weather API data. For now, use generated values.
        # Dump updated nodewise average water level data to depths_train.txt.
        depths_dict = node_api.get_reported_water_levels()
        num_nodes = len(depths_dict)
        num_dates = len(depths_dict[0])
        # BUG FIX: the array was allocated (num_nodes, num_dates) but indexed
        # [date_idx][node_id]; the shape must be (num_dates, num_nodes).
        depths = np.empty((num_dates, num_nodes))
        for date_idx, date in enumerate(sorted(depths_dict[0].keys())):
            for node_id in sorted(depths_dict.keys()):
                readings = depths_dict[node_id][date]
                # BUG FIX: np.mean([]) is nan and nan is truthy, so the old
                # `if not this_mean` check never replaced missing data.
                depths[date_idx][node_id] = np.mean(readings) if readings else 0
        np.savetxt("depths_train.txt", depths)

        # Retrain models.
        db.linear_model = models.LinearRainModel()
        for model in db.XGB_models.values():
            model.train()
            model.test()
def get_reported_water_levels_today():
    """Collect today's reported water levels, keyed by node coordinates.

    :return: dict mapping each node's coords to its rain reports for today.
    """
    date_key = utils.cur_date()
    levels_by_coords = {}
    for node in DB.get_nodes():
        levels_by_coords[node["coords"]] = node["rain_data"][date_key]
    return levels_by_coords
def update_is_flooded(self):
    """Predict today's flood status for every node and persist it to Mongo.

    :return: None
    """
    predictions = self.predict_is_flooded()
    for node in self._nodes.find({}, {'_id': False}):
        flood_history = node["is_flooded"]
        flood_history[utils.cur_date()] = predictions[node["id"]]
        self._nodes.update(
            {"id": node["id"]},
            {"$set": {"is_flooded": flood_history}},
        )
def predict_is_flooded(self):
    """Predict today's flood status for every node.

    Nodes with reported data keep their reported value; nodes without reports
    get a linear-model prediction, refined by a per-node XGB model when one
    exists. Encoding: 1 = reported flooded, 2 = predicted flooded, 0 = dry.

    :return: list indexed by node id with values in {0, 1, 2}.
    """
    # FIX: removed leftover node-6 debug prints and `== True` comparisons;
    # hoisted the repeated utils.cur_date() calls.
    today = utils.cur_date()
    queried_nodes = self.sorted_nodes()
    reported_is_flooded = [node["is_flooded"] for node in queried_nodes]
    linear_predictions = self.linear_model.fit(
        weather.get_precipitation(today))

    reported_with_linear = list(reported_is_flooded)
    for node in queried_nodes:
        node_id = node["id"]
        if len(node["rain_data"][today]) == 0:
            # No reports: fall back to the linear model (2 = predicted wet).
            reported_with_linear[node_id] = (
                2 if linear_predictions[node_id] else 0)
        else:
            # Reports exist: use the reported flag (1 = reported flooded).
            reported_with_linear[node_id] = (
                1 if reported_is_flooded[node_id][today] == 1 else 0)

    # Replace linear predictions with XGB predictions when possible.
    reported_with_linear_and_xgb = list(reported_with_linear)
    for node_id in self.XGB_models.keys():
        if len(queried_nodes[node_id]["rain_data"][today]) == 0:
            # Feature vector: everyone else's status, excluding this node.
            features = [
                reported_with_linear[:node_id]
                + reported_with_linear[node_id + 1:]
            ]
            xgb_prediction = self.XGB_models[node_id].predict(features)
            reported_with_linear_and_xgb[node_id] = 2 if xgb_prediction else 0
    return reported_with_linear_and_xgb
def on_btn_audit_click(self, shzt: bool, xmzd: str) -> None:
    '''
    Audit (or cancel the audit of) the current customer's exam-result record.

    :param shzt: True to audit, False to cancel an existing audit
    :param xmzd: when auditing: the project diagnosis text;
        when cancelling: the cancellation reason
    :return: None
    '''
    # Guard: a customer (tjbh) must be selected first.
    if not self.cur_tjbh:
        mes_about(self, '请选择体检顾客!')
        return
    # Audit branch
    if shzt:
        # Update TJ_TJJLMXB, TJ_EQUIP and TJ_CZJLB
        try:
            # Mark the detail row as executed/settled and stamp the
            # audit time, auditor and diagnosis.
            self.session.query(MT_TJ_TJJLMXB).filter(
                MT_TJ_TJJLMXB.tjbh == self.cur_tjbh,
                MT_TJ_TJJLMXB.zhbh == self.cur_zhbh).update({
                    MT_TJ_TJJLMXB.zxpb: '1',
                    MT_TJ_TJJLMXB.jsbz: '1',
                    MT_TJ_TJJLMXB.qzjs: None,
                    MT_TJ_TJJLMXB.ycbz: '1',
                    MT_TJ_TJJLMXB.shrq: cur_datetime(),
                    MT_TJ_TJJLMXB.shys: self.login_id,
                    MT_TJ_TJJLMXB.zd: xmzd
                })
            # Operation-log entry (type 0124 = result entry).
            data_obj = {
                'jllx': '0124',
                'jlmc': '%s结果录入' % get_key(self.equips, self.cur_zhbh),
                'tjbh': self.cur_tjbh,
                'mxbh': self.cur_zhbh,
                'czgh': self.login_id,
                'czxm': self.login_name,
                'czqy': self.login_area,
                'jlnr': xmzd,
                'bz': None
            }
            self.session.bulk_insert_mappings(MT_TJ_CZJLB, [data_obj])
            # Reflect the new audit state in the review-user widget
            # before committing.
            self.gp_review_user.statechange(self.login_name, cur_date())
            self.session.commit()
            mes_about(self, "审核完成!")
        except Exception as e:
            self.session.rollback()
            mes_about(self, "审核失败,错误信息:%s" % e)
        return
    # Cancel-audit branch
    else:
        # Only the doctor who produced the report may cancel its audit.
        bgys = self.table_report_equip.getCurItemValueOfKey('bgys')
        if self.login_name != bgys:
            mes_about(self, '该报告不是您审核的,您没有权限修改!')
            return
        try:
            # Clear the executed/settled flags and the audit metadata.
            self.session.query(MT_TJ_TJJLMXB).filter(
                MT_TJ_TJJLMXB.tjbh == self.cur_tjbh,
                MT_TJ_TJJLMXB.zhbh == self.cur_zhbh).update({
                    MT_TJ_TJJLMXB.zxpb: '0',
                    MT_TJ_TJJLMXB.jsbz: '0',
                    MT_TJ_TJJLMXB.qzjs: None,
                    MT_TJ_TJJLMXB.ycbz: '1',
                    MT_TJ_TJJLMXB.shrq: None,
                    MT_TJ_TJJLMXB.shys: None,
                    MT_TJ_TJJLMXB.zd: None
                })
            # Operation-log entry (type 0128 = result entry cancelled);
            # the cancellation reason goes into the remark field (bz).
            data_obj = {
                'jllx': '0128',
                'jlmc': '%s结果录入取消' % get_key(self.equips, self.cur_zhbh),
                'tjbh': self.cur_tjbh,
                'mxbh': self.cur_zhbh,
                'czgh': self.login_id,
                'czxm': self.login_name,
                'czqy': self.login_area,
                'jlnr': '%s报告取消审核' % get_key(self.equips, self.cur_zhbh),
                'bz': xmzd
            }
            self.session.bulk_insert_mappings(MT_TJ_CZJLB, [data_obj])
            self.session.commit()
            # Reset the review-user widget after a successful commit.
            self.gp_review_user.statechange()
            mes_about(self, "取消审核完成!")
        except Exception as e:
            self.session.rollback()
            mes_about(self, "取消审核失败,错误信息:%s" %
                      e)
        return