def update(state, clinton_percentage, sanders_percentage):
    """Store today's poll percentages for *state*, unless a row for today
    already exists in the Daily table."""
    yesterday = get_current_date() - timedelta(days=1)
    tomorrow = get_current_date() + timedelta(days=1)
    # Any existing Daily row for this state dated today means we're done.
    existing = models.Daily.query.filter(
        and_(models.Daily.date > yesterday,
             models.Daily.date < tomorrow,
             models.Daily.state == state)).first()
    if existing:
        return
    entry = models.Daily(state=state,
                         date=get_current_date(),
                         clinton_percentage=clinton_percentage,
                         sanders_percentage=sanders_percentage)
    db.session.add(entry)
    db.session.commit()
def performance(method, confusion_matrix):
    """Compute precision/recall/F1 from *confusion_matrix* and persist them in
    dt_yc.model_train_performance tagged with the current year and month.

    :param method: one of the train_* functions whose model was evaluated
    :param confusion_matrix: 2x2 confusion matrix (sklearn layout tn/fp/fn/tp)
    """
    # Resolve the method name BEFORE opening any database connection: the
    # original opened the connection first, so the unknown-method early
    # return leaked the connection and cursor.
    if method == train_random_forest:
        method_name = 'random forest'
    elif method == train_decision_tree:
        method_name = 'decision tree'
    elif method == train_knn:
        method_name = 'knn'
    else:
        return

    tn, fp, fn, tp = confusion_matrix.ravel()
    # Guard the denominators: an all-negative prediction set would otherwise
    # raise ZeroDivisionError.
    precision = float(tp / (tp + fp)) if (tp + fp) else 0.0
    recall = float(tp / (tp + fn)) if (tp + fn) else 0.0
    denom = recall + precision
    f1_score = float(2 * (recall * precision) / denom) if denom else 0.0

    current_date = get_current_date()
    year = int(current_date[0:4])
    month = int(current_date[5:7])

    conn = pymysql.connect(host='10.214.163.179', user='******',
                           password='******', port=3306, database='dt_yc')
    try:
        cursor = conn.cursor()
        insert = "INSERT INTO dt_yc.model_train_performance VALUES(%s, %s, %s, %s, %s, %s)"
        val = [method_name, year, month, precision, recall, f1_score]
        cursor.execute(insert, val)
        conn.commit()
        cursor.close()
    finally:
        # Close the connection even if the INSERT fails.
        conn.close()
def send_chart_to_user(chat_id, message_id, image_url, labels, current_prices, price_changes, created_dates):
    """
    Replace the chart photo on an existing telegram message with a fresh one.

    :param chat_id: telegram chat_id
    :param message_id: telegram message_id
    :param image_url: path of the stored chart image
    :param labels: item labels, parallel to current_prices and price_changes
    :param current_prices: latest price per label
    :param price_changes: percent change per label
    :param created_dates: creation dates; created_dates[0] appears in the caption
    """
    current_date = utils.get_current_date()
    caption = f"Price changes since _{created_dates[0]}_:\n\n"
    for i, label in enumerate(labels):
        caption += f"{label}: ${current_prices[i]} *({price_changes[i]:.1f}%)*\n"
    caption += f"\n_Last updated on {current_date}_"
    # `with` guarantees the image handle is closed; the original opened the
    # file and never closed it (file-descriptor leak).
    with open(image_url, "rb") as chart:
        try:
            bot.editMessageMedia(chat_id=chat_id,
                                 message_id=message_id,
                                 media=telegram.InputMediaPhoto(chart,
                                                                caption=caption,
                                                                parse_mode='Markdown'))
            logger.info(f"Updated chart {message_id} for {chat_id}")
        except telegram.error.BadRequest:
            # Telegram rejects an edit whose media is unchanged — not an error.
            logger.info(f"Already updated chart {message_id} for {chat_id}, skipping...")
def get_bill_by_month(self, current_month=True, queue=None):
    """Fetch one month's bill and push the (date, bill) pair onto *queue*.

    :param current_month: passed through to utils.get_current_date as `current`
    :param queue: queue.Queue-like object receiving the result tuple
    """
    year, month, last_day_in_month = utils.get_current_date(
        current=current_month)
    bill_date, bill = self.get_bill(year=year,
                                    month=month,
                                    last_day_in_month=last_day_in_month)
    queue.put((bill_date, bill))
def record_screen(self, meeting_name: str) -> None:
    """Record the screen for *meeting_name*, toggling self.recording while
    the recorder runs. Output name is the meeting name plus current date."""
    output_file_name = meeting_name + utils.get_current_date()
    self.recording = True
    ScreenRecorder(output_file_name).run()
    self.recording = False
def send_first_graph(update, context):
    """Send the initial price chart for the chosen variant and remember its
    telegram message id in context.chat_data['chart_id'].

    Side effects: sends a photo via the bot and moves the generated image to
    its permanent, message-id-based location.
    """
    chat_id = str(update.message.chat.id)

    # Create image
    photo_url = plotly_utils.generate_photo_url(update, context)
    updated_date = utils.get_current_date()
    # `with` closes the handle even if send_photo raises (the original closed
    # it manually, and leaked it on exception).
    with open(photo_url, "rb") as photo:
        message = bot.send_photo(chat_id=update.message.chat.id,
                                 photo=photo,
                                 parse_mode='Markdown',
                                 caption=f"_Last updated on {updated_date}_")
    chart_id = str(message.message_id)
    perm_save_url = f"{plotly_utils.IMAGE_DESTINATION}{chat_id}_{chart_id}.png"
    # Move the image to its permanent path — but never move the shared sample
    # image. BUG FIX: the original used `is not`, which compares object
    # identity, not string equality, so an equal-but-distinct path string
    # could wrongly move the sample image.
    if photo_url != plotly_utils.SAMPLE_IMAGE_URL:
        shutil.move(photo_url, perm_save_url)
    # Store in context
    context.chat_data['chart_id'] = chart_id
    logger.info("BOT: sent first chart.")
def _test_month_year(days, test='month_int'):
    """Test for month and year.

    For each day in *days*, emit the current month/year value, or the
    rolled-over value (next month, or next year at a December month wrap)
    for days that precede the first day in the sequence.

    :param days: A list of integers representing a logical sequence of day
    :param test: Take values 'month_int' or 'year'
    """
    current_month = int(get_current_date('month_int'))
    current_value = int(get_current_date(test))
    # Value to use for days that wrapped past the first day of the sequence.
    rollover = current_value
    if test == 'month_int':
        rollover = 1 if current_month == 12 else current_value + 1
    elif test == 'year':
        rollover = current_value + 1 if current_month == 12 else current_value
    return [rollover if day < days[0] else current_value for day in days]
def main():
    """CLI entry point: dispatch the date-handling commands and the
    buy/sell/report commands.

    Returns the (possibly updated) current date string for the date
    commands, otherwise None.
    """
    # Read config file for date
    current_date = get_current_date()
    # Get command line arguments
    args = get_arguments()

    # Check date handling commands and execute the corresponding routine.
    # BUG FIX: the original did `current_date = print(...)`; print() always
    # returns None, so these commands printed the new date but returned None.
    if args.advance_date is not None:
        current_date = advance_date(args.advance_date)
        print(current_date)
        return current_date
    if args.set_date is not None:
        current_date = set_date(args.set_date)
        print(current_date)
        return current_date

    # Check commands and execute the corresponding routine
    command = args.CLI_command.lower()
    if command == 'buy':
        print(buy(args.product_name, args.buy_date, args.price,
                  args.expiration_date))
    elif command == 'sell':
        print(sell(args.product_name, args.sell_date, args.price))
    elif command == 'report':
        # Convert yesterday, now, today or date to a concrete report date;
        # later flags win when several are given.
        report_date = None
        if args.yesterday is not None:
            report_date = (datetime.strptime(current_date, '%Y-%m-%d') -
                           timedelta(days=1)).strftime('%Y-%m-%d')
        if args.now is not None:
            report_date = current_date
        if args.today is not None:
            report_date = current_date
        if args.date is not None:
            report_date = args.date
        if report_date is not None:
            print(report.show_report(args.report_name, report_date,
                                     args.export_csv, args.show_graph))
        else:
            print("ERROR: missing <date>")
    # Unknown command
    else:
        print(f"ERROR: unknown command '{args.CLI_command}' <buy, sell, report>")
    return
def predict(method_type):
    """Load the trained model for *method_type*, score today's feature file,
    and write the positive-class probabilities back to the database.

    :param method_type: 'random_forest', 'decision_tree' or 'knn'
    """
    # Joblib file and display name per supported method; replaces three
    # copy-pasted branches in the original.
    model_registry = {
        'random_forest': ('rf.joblib', 'random forest'),
        'decision_tree': ('dt.joblib', 'decision tree'),
        'knn': ('knn.joblib', 'knn'),
    }

    csv_file_name = 'features/' + get_current_date() + '-feature.csv'
    tranid_set = []
    predict_X = []
    # `with` guarantees the feature file is closed (the original leaked it).
    with open(csv_file_name, 'r') as csv_file:
        reader = csv.reader(csv_file)
        for cnt, row in enumerate(reader, start=1):
            tranid_set.append(row[0])
            predict_X.append(row[1:])
            if cnt % 10000 == 0:
                print('预测特征读取中,已完成' + str(cnt) + '条')

    # Non-numeric cells fall back to 0 before the float conversion.
    predict_X = [[float(i) if i.replace('.', '', 1).isdigit() else 0 for i in j]
                 for j in predict_X]
    predict_X = np.asarray(predict_X, dtype=np.float32)

    if method_type in model_registry:
        model_file, model_name = model_registry[method_type]
        clf = load(model_file)
        # Column 1 of predict_proba is the positive-class probability.
        predict_Y = [row[1] for row in clf.predict_proba(predict_X)]
        write_back(tranid_set, predict_Y)
        print('预测完成,使用模型为' + model_name)
    print('------------------------------------------------------')
    print('------------------------------------------------------')
def handle_forgot_password(token, user, new_password):
    """Validate a forget-password token; if *user* exists, store the new
    password hash, notify them by email, and redirect to the login page."""
    load_token(token=token, salt='forget-password', redirect_to='login')
    # Guard clause: unknown user — report and bail out.
    if not user:
        flash("Could not find a user with the specified email address.")
        return redirect(url_for('login_system.login'))
    hashed = generate_password_hash(password=new_password,
                                    method='pbkdf2:sha256',
                                    salt_length=8)
    try:
        user.password = hashed
    except AttributeError:
        return abort(400)
    db.session.commit()
    password_changed_notification(user.email, user.name, get_current_date())
    flash("Password changed successfully.")
    return redirect(url_for('login_system.login'))
def handle_email_verification(token, public_id):
    """Confirm a user's email address from a verification token, log them in
    on first confirmation, and redirect appropriately."""
    load_token(token=token, salt='email-verify',
               redirect_to='login_system.register')
    user = User.query.filter_by(public_id=public_id).first()
    # Guard clause: no such user — send them back to registration.
    if not user:
        flash("This user does not exist.")
        return redirect(url_for('login_system.register'))
    if user.confirmed_email:
        flash("You've already confirmed your email.")
    else:
        user.confirmed_email = True
        user.join_date = get_current_date()
        db.session.commit()
        login_user(user)
        flash("You've confirmed your email successfully.")
    return redirect(url_for('main.home'))
def write_run_file(self, times=1):
    """Write run.json under the xml path, listing every XML case there with
    the requested loop count plus count/timestamp metadata."""
    xml_list = utils.FileOperation.get_xml_files(self._xml_path)
    cases = [{"case": xml, "loop": times} for xml in xml_list]
    payload = {
        "info": {
            "count": len(xml_list),
            "time": utils.get_current_date() + " " + utils.get_current_time(),
            "loop": times
        },
        "cases": cases
    }
    utils.FileOperation.store_json(
        os.path.join(self._xml_path, "run.json"), payload)
def write_back(tranid_set, result_set):
    """Rebuild dt_yc.model_predictions and write every (tranid, probability)
    pair to both the table and a dated CSV under results/.

    :param tranid_set: transaction ids, parallel to result_set
    :param result_set: predicted probabilities (floats or float-like)
    """
    start = time.time()
    conn = pymysql.connect(host='10.214.163.179', user='******',
                           password='******', port=3306, database='dt_yc')
    cursor = conn.cursor()
    # Recreate the predictions table from scratch, then index it.
    cursor.execute('DROP TABLE IF EXISTS dt_yc.model_predictions')
    conn.commit()
    cursor.execute("""
    CREATE TABLE dt_yc.model_predictions(
        TRANID varchar(100),
        PROBABILITY decimal(10, 4)
    )
    """)
    conn.commit()
    cursor.execute('CREATE INDEX index_predictions ON model_predictions(TRANID)')
    conn.commit()

    csv_file_name = 'results/' + get_current_date() + '-result.csv'
    insert = 'INSERT INTO dt_yc.model_predictions VALUES(%s, %s)'
    # `with` guarantees the CSV is flushed and closed — the original opened
    # the file and never closed it, risking truncated output on interpreter
    # exit.
    with open(csv_file_name, 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(['tranid', 'probability'])
        for i, (tranid, prob) in enumerate(zip(tranid_set, result_set)):
            writer.writerow([tranid, float(prob)])
            cursor.execute(insert, [tranid, round(float(prob), 4)])
            conn.commit()
            if (i + 1) % 10000 == 0:
                print('写回中,已完成' + str(i + 1) + '条')
    cursor.close()
    conn.close()
    elapsed = time.time() - start
    print('预测结果写回完毕,运行时间:' + str(round(elapsed / 60, 2)) + '分钟')
async def get_analysis_result_response(file):
    """Scan a file and assemble the analysis-result payload for the caller.

    NOTE(review): the body reads the name `hash`, not the `file` parameter.
    If no module-level `hash` variable exists, this passes the *builtin*
    hash function to get_file() and uses it as a dict key — confirm the
    intended lookup key; `file` itself is never used.
    """
    analysis_coroutine = scan_file(get_file(hash), app.config["DOCKER_CONFIG_PATH"])
    analysis_response = await analysis_coroutine
    table_html_scan_results, av_count, infected_count, icon = parse_analysis_result(
        analysis_response)
    # Cache the scan date per file so repeat views keep the first timestamp.
    if 'scan_date' not in app.config["files_by_hash"][hash]:
        scan_date = get_current_date()
        # assumes get_current_date() returns a (date, dash-separated-time)
        # pair — TODO confirm against its definition
        app.config["files_by_hash"][hash]['scan_date'] = "{} {}".format(
            scan_date[0], scan_date[1].replace('-', ':'))
    scan_date_string = app.config["files_by_hash"][hash]['scan_date']
    return {
        "response_json": analysis_response,
        "table_html_scan_results": table_html_scan_results,
        "av_count": av_count,
        "infected_count": infected_count,
        "icon": icon,
        "scan_date": scan_date_string
    }
def validate_imports(data):
    """Schema-validate an imports payload and assert citizen invariants:
    unique ids, no self-references, birth dates in the past, and symmetric
    relative links."""
    validate(data, SCHEMA_CITIZENS)
    citizens = {}
    for entry in data['citizens']:
        cid = entry['citizen_id']
        citizens[cid] = {
            'birth_date': entry['birth_date'],
            'relatives': entry['relatives']
        }
        # check own id in relatives
        assert cid not in entry['relatives']
    # unique citizen_id check
    assert len(citizens) == len(data['citizens'])
    for cid, info in citizens.items():
        # check correct date
        assert get_current_date() > parse_date(info['birth_date'])
        # check correct relatives: every link must be mutual
        for relative in info['relatives']:
            assert cid in citizens[relative]['relatives']
def _search_proc(self):
    """Scheduled ticket search: fill in the 12306 search form (trip type,
    stations, date), then wait until the configured release time before
    returning so the caller can start refreshing."""
    # 1. Navigate to the ticket-search page
    self.driver.get(self.search_ticket_url)
    # 2. Select one-way trip
    self.driver.find_element_by_xpath('//label[@for="dc"]').click()
    # A short wait is needed here: testing showed that operating too fast
    # leaves the final search click stuck in a perpetual "searching" state.
    time.sleep(1)
    # 3. Departure, destination and departure-date input boxes
    fromStationInput = self.driver.find_element_by_id("fromStationText")
    toStationInput = self.driver.find_element_by_id("toStationText")
    departTimeInput = self.driver.find_element_by_id("train_date")
    # 4.1 Choose the departure station: click, type the name, then pick the
    # exactly-matching city from the suggestion dropdown.
    ActionChains(self.driver).click(fromStationInput).send_keys(
        self.from_station).perform()
    try:
        list_0 = self.driver.find_elements_by_xpath(
            '//div[@id="panel_cities"]//div/span[1]')
        for vb in list_0:
            if vb.text == self.from_station:
                time.sleep(1)
                vb.click()
                break
            else:
                print('过滤掉这个城市')
    except:
        pass
    time.sleep(1)
    # 4.2 Choose the destination station the same way
    ActionChains(self.driver).click(toStationInput).send_keys(
        self.to_station).perform()
    try:
        list_01 = self.driver.find_elements_by_xpath(
            '//div[@id="panel_cities"]//div/span[1]')
        for vb2 in list_01:
            if vb2.text == self.to_station:
                time.sleep(1)
                vb2.click()
                break
            else:
                print('过滤掉这个城市')
    except:
        pass
    time.sleep(1)
    # 5. Choose the departure date: drop the readonly attribute so the field
    # accepts keyboard input.
    js = "document.getElementById('train_date').removeAttribute('readonly')"  # del train_date readonly property
    self.driver.execute_script(js)
    departTimeInput.clear()
    # Departure date: defaults to today when none was configured
    if not self.depart_time:
        self.depart_time = get_current_date()
    ActionChains(self.driver).click(departTimeInput).send_keys(
        self.depart_time).perform()
    time.sleep(1)
    # 6. Explicit wait until the [search] button becomes clickable
    WebDriverWait(self.driver, 1000).until(
        EC.element_to_be_clickable((By.ID, "query_ticket")))
    # 7. Scheduled grabbing
    # 7.1 No timer configured: start grabbing immediately
    if self.timer == '0' or not self.timer:
        return
    # 7.2 Timed grabbing: e.g. tickets released at 15:00:00, start refreshing
    # a few minutes early per the configured HH:MM:SS timer.
    else:
        timer_h = self.timer.split(':')[0]
        timer_m = self.timer.split(':')[1]
        timer_s = self.timer.split(':')[2]
        while True:
            # Current wall-clock time
            current_time = time.localtime()
            print(time.localtime())
            if (current_time.tm_hour == int(timer_h) and
                    current_time.tm_min >= int(timer_m) and
                    current_time.tm_sec >= int(timer_s)):
                # if (current_time.tm_hour == 14 and current_time.tm_min >= 55 and current_time.tm_sec >= 0):
                print(u'开始刷票')
                break
            else:
                # Still waiting: print the time once every 30 seconds
                if current_time.tm_sec % 30 == 0:
                    print('等待中,当前时间:%s' %
                          (time.strftime('%H:%M:%S', current_time)))
                print('还未到时间,休眠1秒钟')
                time.sleep(1)
        print('==' * 40)
def create_mambo_crossnet_table(input_file, src_file, dst_file, dataset_name,
                                db_id, src_node_index, dst_node_index,
                                mode_name1, mode_name2, output_dir,
                                full_crossnet_file, db_edge_file,
                                src_mode_filter, dst_mode_filter,
                                mambo_id_counter_start, skip_missing_ids,
                                verbose=False, delimiter=DELIMITER):
    """Build a mambo crossnet (edge) table from a raw input file.

    Appends edges to the full crossnet file (one global mambo_eid per edge)
    and writes a per-dataset edge file carrying the non-endpoint attribute
    columns. Endpoint ids are mapped to mambo node ids via the src/dst mode
    files; mode names default to names parsed from those file names.
    """
    inFNm = input_file
    srcFile = src_file
    dstFile = dst_file
    dataset = dataset_name
    db_id = db_id
    srcIdx = src_node_index
    dstIdx = dst_node_index
    # Dataset ids of the two endpoint mode files, parsed from their names.
    src_db_id = utils.parse_dataset_id_from_name(os.path.basename(srcFile))
    dst_db_id = utils.parse_dataset_id_from_name(os.path.basename(dstFile))
    mode_name1 = utils.parse_mode_name_from_name(
        os.path.basename(srcFile)) if mode_name1 is None else mode_name1
    mode_name2 = utils.parse_mode_name_from_name(
        os.path.basename(dstFile)) if mode_name2 is None else mode_name2
    outFNm = full_crossnet_file
    if outFNm is None:
        outFNm = os.path.join(
            output_dir, utils.get_full_cross_file_name(mode_name1, mode_name2))
    outFNm2 = db_edge_file
    if outFNm2 is None:
        outFNm2 = os.path.join(
            output_dir,
            utils.get_cross_file_name(mode_name1, mode_name2, db_id, dataset))
    # raw id -> mambo node id for each endpoint mode; reuse the mapping when
    # both endpoints come from the same mode file.
    src_mapping = utils.read_mode_file(srcFile)
    if os.path.samefile(srcFile, dstFile):
        dst_mapping = src_mapping
    else:
        dst_mapping = utils.read_mode_file(dstFile)
    src_filter = utils.get_filter(src_mode_filter)
    dst_filter = utils.get_filter(dst_mode_filter)
    add_schema = True
    counter = mambo_id_counter_start
    # -1 means "continue numbering from the existing full crossnet file".
    if counter == -1:
        counter = utils.get_max_id(outFNm)
    if verbose:
        print 'Starting at mambo id: %d' % counter
    with open(inFNm, 'r') as inF, open(outFNm, 'a') as fullF, open(outFNm2, 'w') as dbF:
        # Add schema/metadata (only when starting a fresh full file)
        if counter == 0:
            fullF.write('# Full crossnet file for %s to %s\n' %
                        (mode_name1, mode_name2))
            fullF.write('# File generated on: %s\n' % utils.get_current_date())
            fullF.write(
                '# mambo_eid%sdataset_id%ssrc_mambo_nid%sdst_mambo_nid\n' %
                (delimiter, delimiter, delimiter))
            dbF.write('# Crossnet table for dataset: %s\n' % dataset)
            dbF.write('# File generated on: %s\n' % utils.get_current_date())
        # Process file
        for line in inF:
            if line[0] in COMMENT:
                continue
            vals = utils.split_then_strip(line, delimiter)
            # Emit the attribute-column schema once, derived from the first
            # data line's column count.
            if add_schema:
                attrs_schema = '# mambo_eid%ssrc_dataset_id%sdst_dataset_id' % (
                    delimiter, delimiter)
                for i in range(len(vals)):
                    if i != srcIdx and i != dstIdx:
                        attrs_schema += '%sC%d' % (delimiter, i)
                dbF.write('%s\n' % attrs_schema)
                add_schema = False
            id1 = vals[srcIdx]
            id2 = vals[dstIdx]
            if src_filter:
                id1 = src_filter(id1)
            if dst_filter:
                id2 = dst_filter(id2)
            if id1 == '' or id2 == '':
                continue
            # Optionally drop edges whose endpoints have no mambo node id.
            if skip_missing_ids and (id1 not in src_mapping
                                     or id2 not in dst_mapping):
                #print id1, id2
                continue
            # Remaining columns become edge attributes.
            attr_strs = ''
            for i in range(len(vals)):
                if i != srcIdx and i != dstIdx:
                    attr_strs += delimiter + vals[i]
            fullF.write('%d%s%d%s%d%s%d\n' %
                        (counter, delimiter, db_id, delimiter,
                         src_mapping[id1], delimiter, dst_mapping[id2]))
            dbF.write('%d%s%d%s%d%s\n' % (counter, delimiter, src_db_id,
                                          delimiter, dst_db_id, attr_strs))
            counter += 1
    if verbose:
        print 'Ending at mambo id: %d' % counter
def create_mapped_mode_table(mode_name, input_file, dataset_name, db_id,
                             mapping_file, skip, map_index, node_index,
                             output_dir, full_mode_file, db_node_file,
                             delimiter=DELIMITER):
    """Build a mode (node) table whose mambo ids come from an external
    mapping file.

    Rewrites the full mode file and the per-dataset node file for
    *input_file*, looking node ids up in *mapping_file*. When a node id is
    unmapped and *skip* is false, a fresh id is allocated and appended to the
    mapping file. Entries from a pre-existing full mode file are preserved
    (their dataset-id lists merged or re-emitted).
    """
    if full_mode_file is None:
        full_mode_file = os.path.join(output_dir,
                                      utils.get_full_mode_file_name(mode_name))
    # mambo id -> dataset-id string from any existing full mode file.
    full_mode_map = {}
    if os.path.isfile(full_mode_file):
        with open(full_mode_file, 'r') as fm_file:
            for line in fm_file:
                if line[0] in COMMENT:  # skip comments
                    continue
                split_line = line.strip().split(delimiter)
                full_mode_map[int(split_line[0])] = split_line[1]
    if db_node_file is None:
        db_node_file = os.path.join(
            output_dir,
            utils.get_mode_file_name(mode_name, db_id, dataset_name))
    # Load the external mapping: raw node id (column map_index) -> mambo id
    # (column 0). max_id tracks the largest id for fresh allocations.
    max_id = 0
    mapping = {}
    num_cols = 0
    with open(mapping_file, 'r') as mf:
        for line in mf:
            if line[0] in COMMENT:
                continue
            split_line = line.strip().split(delimiter)
            num_cols = len(split_line)
            mapping[split_line[map_index]] = split_line[0]
            max_id = int(split_line[0])
    has_header = True
    seen = set()
    seen_counter = set()
    with open(full_mode_file, 'w') as fm_file, \
            open(input_file, "r") as in_file, \
            open(db_node_file, 'w') as db_file, open(
                mapping_file, 'a') as mf:
        fm_file.write('# Full mode table for %s\n' % mode_name)
        fm_file.write('# File generated on: %s\n' % utils.get_current_date())
        fm_file.write('# mambo_nid%sdataset_ids\n' % delimiter)
        db_file.write('# Mode table for dataset: %s\n' % dataset_name)
        db_file.write('# File generated on: %s\n' % utils.get_current_date())
        add_schema = True
        for line in in_file:
            # skip comments and the single header line
            if line[0] in COMMENT or has_header:
                has_header = False
                continue
            vals = utils.split_then_strip(line, delimiter)
            # Emit the attribute-column schema once.
            if add_schema:
                attrs_schema = '# mambo_nid%sdataset_nid' % delimiter
                for i in range(len(vals)):
                    if i != node_index:
                        attrs_schema += '%sC%d' % (delimiter, i)
                db_file.write('%s\n' % attrs_schema)
                add_schema = False
            # Node id: part after the first '.' when present, else the whole
            # value (e.g. strips a namespace prefix).
            node_id = vals[node_index].split('.')
            node_id = node_id[0] if len(node_id) == 1 else node_id[1]
            if node_id in seen or len(node_id) == 0:
                continue
            # Remaining columns become node attributes.
            attrs_str = ''
            for i in range(len(vals)):
                if i != node_index:
                    attrs_str += delimiter + vals[i]
            counter = 0
            if node_id in mapping:
                counter = int(mapping[node_id])
            elif not skip:
                # Unmapped node: allocate the next id and append a new row to
                # the mapping file, with NONE in every column except the one
                # holding this node id.
                max_id = max_id + 1
                counter = max_id
                result = "%d%s" % (counter, delimiter)
                for i in range(num_cols - 1):
                    label = NONE if i + 1 != map_index else node_id
                    result = result + label + delimiter
                result = result.strip(delimiter) + '\n'
                mf.write(result)
            # Merge this dataset's id into any dataset list the node already
            # had in the previous full mode file.
            db_ids = full_mode_map[counter] + "," + str(
                db_id) if counter in full_mode_map else str(db_id)
            fm_file.write('%d%s%s\n' % (counter, delimiter, db_ids))
            db_file.write('%d%s%s%s\n' %
                          (counter, delimiter, vals[node_index], attrs_str))
            seen.add(node_id)
            seen_counter.add(counter)
        # Re-emit previously known nodes that did not appear in this input.
        for counter in full_mode_map:
            if counter not in seen_counter:
                fm_file.write('%d%s%s\n' %
                              (counter, delimiter, full_mode_map[counter]))
def validate_patch(data):
    """Schema-validate a citizen patch; a supplied birth_date must lie in
    the past."""
    validate(data, SCHEMA_PATCH)
    if 'birth_date' not in data:
        return
    assert get_current_date() > parse_date(data['birth_date'])
def feature_extraction():
    """Extract per-elevator model features from dt_yc and write them to a
    dated CSV under features/.

    For every elevator in ele_info (with a known use_start_date), joins the
    precomputed six-month fault-rate tables for its use/make/set/insp/wb
    units plus its own six-month fault counts, and one-hot encodes the
    safety level and usage location. Column order matches the historical
    model_ele_info_feature layout.
    """
    current_date = get_current_date()
    start = time.time()
    conn = pymysql.connect(host='10.214.163.179', user='******',
                           password='******', port=3306, database='dt_yc')
    cursor = conn.cursor()

    def read_rate_table(table):
        # Map the table's key column to its six monthly fault-rate values.
        cursor.execute('SELECT * FROM dt_yc.' + table)
        rates = {row[0]: list(row[1:7]) for row in cursor.fetchall()}
        print(table + '读取完毕')
        return rates

    use_unit_fault_rate = read_rate_table('model_use_unit_fault_rate')
    make_unit_fault_rate = read_rate_table('model_make_unit_fault_rate')
    set_unit_fault_rate = read_rate_table('model_set_unit_fault_rate')
    insp_org_fault_rate = read_rate_table('model_insp_org_fault_rate')
    wb_unit_fault_rate = read_rate_table('model_wb_unit_fault_rate')

    # tranid -> list of [month_num, fault_num] pairs
    cursor.execute('SELECT * FROM dt_yc.model_fault_num')
    model_fault_num = {}
    for tranid, month_num, fault_num in cursor.fetchall():
        model_fault_num.setdefault(tranid, []).append([month_num, fault_num])
    print('model_fault_num读取完毕')

    query = """
    SELECT equ_safe_level, apply_location, use_start_date, exam_type,
           use_unit_code, make_unit_name, set_unit_name, insp_org_name,
           wb_unit_name, tranid
    FROM dt_yc.ele_info
    WHERE use_start_date is not null
    """
    cursor.execute(query)
    rows = cursor.fetchall()
    print('ele_info读取完毕')

    # Categorical encodings, in the original feature-column order. These
    # tables replace the 21-branch if/elif one-hot chains.
    equ_levels = ['高风险', '中风险', '低风险']
    apply_locations = ['其他场所', '商场', '宾馆', '餐饮场所', '医疗机构', '学校',
                       '养老机构', '幼儿园', '展览馆', '车站', '公园', '公共浴池',
                       '客运码头', '机场', '儿童活动中心', '影剧院', '图书馆',
                       '体育场馆']
    # Default fault rates for unknown units; read-only, so sharing is safe.
    zero6 = [0, 0, 0, 0, 0, 0]

    csv_file_name = 'features/' + get_current_date() + '-feature.csv'
    cnt = 0
    # `with` guarantees the CSV is flushed and closed (the original never
    # closed it).
    with open(csv_file_name, 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        for row in rows:
            (equ_safe_level, apply_location, use_start_date, exam_type,
             use_unit_code, make_unit_name, set_unit_name, insp_org_name,
             wb_unit_name, tranid) = row

            # One-hot encode safety level and usage location.
            equ = [1 if equ_safe_level == level else 0 for level in equ_levels]
            apply_flags = [1 if apply_location == loc else 0
                           for loc in apply_locations]

            use = use_unit_fault_rate.get(use_unit_code, zero6)
            make = make_unit_fault_rate.get(make_unit_name, zero6)
            _set = set_unit_fault_rate.get(set_unit_name, zero6)
            insp = insp_org_fault_rate.get(insp_org_name, zero6)
            wb = wb_unit_fault_rate.get(wb_unit_name, zero6)

            use_months = get_use_months(current_date, str(use_start_date))

            # Six-month fault counts; months without faults stay 0.
            fault_nums = [0, 0, 0, 0, 0, 0]
            for month_num, fault_num in model_fault_num.get(tranid, []):
                fault_nums[month_num - 1] = fault_num

            val = ([tranid] + equ + apply_flags + [use_months, exam_type] +
                   _set + make + fault_nums + insp + use + wb)
            writer.writerow(val)
            cnt = cnt + 1
            if cnt % 10000 == 0:
                print('处理中,目前已完成' + str(cnt) + '条')
    cursor.close()
    conn.close()
    elapsed = time.time() - start
    print('电梯特征提取完毕,总运行时间:' + str(round(elapsed / 60, 2)) + '分钟')
    print('------------------------------------------------------')
    print('------------------------------------------------------')
def create_mambo_mode_table(input_file, db_id, mode_name, dataset_name,
                            full_mode_file, output_dir, db_node_file,
                            mambo_id_counter_start, node_index,
                            verbose=False, delimiter=DELIMITER):
    """Build a mambo mode (node) table from a raw input file.

    Appends one line per new node to the full mode file (assigning
    sequential mambo node ids) and writes a per-dataset node file carrying
    the non-id attribute columns. Duplicate and empty node ids are skipped.
    """
    # Process command line arguments, get default path names
    inFNm = input_file
    db_id = db_id
    mode_name = mode_name
    dataset = dataset_name
    outFNm = full_mode_file
    if outFNm is None:
        outFNm = os.path.join(output_dir,
                              utils.get_full_mode_file_name(mode_name))
    dbFNm = db_node_file
    if dbFNm is None:
        dbFNm = os.path.join(
            output_dir, utils.get_mode_file_name(mode_name, db_id, dataset))
    counter = mambo_id_counter_start
    # -1 means "continue numbering from the existing full mode file".
    if counter == -1:
        counter = utils.get_max_id(outFNm)
    # Read input file, create output files.
    seen = set()
    if verbose:
        print 'Starting at mambo id: %d' % counter
    with open(inFNm, 'r') as inF, open(outFNm, 'a') as outF, open(dbFNm, 'w') as dbF:
        # Header/metadata only when starting a fresh full file.
        if counter == 0:
            outF.write('# Full mode table for %s\n' % mode_name)
            outF.write('# File generated on: %s\n' % utils.get_current_date())
            outF.write('# mambo_nid%sdataset id\n' % delimiter)
            dbF.write('# Mode table for dataset: %s\n' % dataset)
            dbF.write('# File generated on: %s\n' % utils.get_current_date())
        add_schema = True
        for line in inF:
            if line[0] in COMMENT:  # skip comments
                continue
            vals = utils.split_then_strip(line, delimiter)
            # Emit the attribute-column schema once, derived from the first
            # data line's column count.
            if add_schema:
                attrs_schema = '# mambo_nid%sdataset_nid' % delimiter
                for i in range(len(vals)):
                    if i != node_index:
                        attrs_schema += '%sC%d' % (delimiter, i)
                dbF.write('%s\n' % attrs_schema)
                add_schema = False
            node_id = vals[node_index]
            if node_id in seen or len(node_id) == 0:
                continue
            # Remaining columns become node attributes.
            attrs_str = ''
            for i in range(len(vals)):
                if i != node_index:
                    attrs_str += delimiter + vals[i]
            outF.write('%d%s%d\n' % (counter, delimiter, db_id))
            dbF.write('%d%s%s%s\n' % (counter, delimiter, node_id, attrs_str))
            seen.add(node_id)
            counter += 1
    if verbose:
        print 'Ending at mambo id: %d' % counter
def update_fault_number_month():
    """Rebuild dt_yc.model_fault_num: for every elevator (tranid), count its
    faults in each of the six most recent 30-day windows (month_num 1..6,
    walking backwards from yesterday)."""
    start = time.time()
    conn = pymysql.connect(host='10.214.163.179', user='******',
                           password='******', port=3306, database='dt_yc')
    cursor = conn.cursor()
    # -------------------------------- count elevator faults --------------------------------
    # Drop the previous fault-count table
    query = 'DROP TABLE IF EXISTS dt_yc.model_fault_num'
    cursor.execute(query)
    conn.commit()
    # Create the new fault-count table
    query = """
    CREATE TABLE dt_yc.model_fault_num(
        TRANID varchar(50),
        MONTH_NUM int,
        FAULT_NUM int,
        primary key(TRANID, MONTH_NUM)
    )
    """
    cursor.execute(query)
    conn.commit()
    # Create an index on the new fault-count table
    query = 'CREATE INDEX fault_num_index ON dt_yc.model_fault_num(TRANID, MONTH_NUM)'
    cursor.execute(query)
    conn.commit()
    # Count elevator faults
    current_date = get_current_date()
    if current_date is not None:
        # Window 1 ends yesterday; each iteration slides 30 days further back.
        end_date = get_previous_diff_date(current_date, -1)
        for month_num in range(1, 7):
            start_date = get_previous_diff_date(end_date, 30)
            # Faults per elevator inside [start_date, end_date), restricted to
            # rows whose unit columns hold real values (not placeholders).
            query = """
            SELECT tranid, count(*) fault_num
            FROM dt_yc.zt_dt_fault
            WHERE form_create_time >= DATE_FORMAT(%s,'%%Y-%%m-%%d')
            and form_create_time < DATE_FORMAT(%s,'%%Y-%%m-%%d')
            and use_unit_code is not null and use_unit_code != '-' and use_unit_code != '不详'
            and make_unit_name is not null and make_unit_name != '-' and make_unit_name != '/'
            and set_unit_name is not null and set_unit_name != '-' and set_unit_name != '/'
            and insp_org_name is not null and insp_org_name != '-' and insp_org_name != '/'
            and wb_unit_name is not null and wb_unit_name != '/' and wb_unit_name != '*'
            and wb_unit_name != '0' and wb_unit_name != '//' and wb_unit_name != '-'
            and wb_unit_name != '--' and wb_unit_name != '**' and wb_unit_name != '1'
            GROUP BY tranid
            """
            var = [start_date, end_date]
            cursor.execute(query, var)
            rows = cursor.fetchall()
            for row in rows:
                tranid = row[0]
                fault_num = row[1]
                insert = 'INSERT INTO dt_yc.model_fault_num VALUES(%s, %s, %s)'
                var = [tranid, month_num, fault_num]
                cursor.execute(insert, var)
                conn.commit()
            end_date = start_date
    cursor.close()
    conn.close()
    elapsed = (time.time() - start)
    print('电梯六个月内故障数量统计完毕,总运行时间:' + str(round(elapsed / 60, 2)) + '分钟')
    print('------------------------------------------------------')
    print('------------------------------------------------------')
def update_fault_rate_month():
    """Rebuild the per-unit elevator fault-rate tables in dt_yc.

    For each of five dimensions -- use unit, manufacturer (make unit),
    installer (set unit), inspection organisation (insp org), and
    maintenance unit (wb unit) -- this job:

      1. rebuilds a ``model_*_ele_num`` table counting elevators per unit,
      2. rebuilds a ``model_*_fault_num`` table counting faults per unit
         for each of the last six 30-day windows,
      3. rebuilds a ``model_*_fault_rate`` table holding the six monthly
         rates (fault_num / ele_num, rounded to 4 decimals); units with
         zero faults in all six windows are skipped for efficiency.

    Side effects only (DDL/DML on MySQL plus progress prints); returns None.
    """
    start = time.time()
    flag = start
    conn = pymysql.connect(host='10.214.163.179', user='******', password='******',
                           port=3306, database='dt_yc')
    cursor = conn.cursor()

    def run(sql):
        # Execute one DDL/DML statement and commit it immediately.
        cursor.execute(sql)
        conn.commit()

    def report(message):
        # Print elapsed seconds since the previous checkpoint.
        nonlocal flag
        now = time.time()
        print(message + str(now - flag))
        flag = now

    # (table prefix, lower-case fault column, upper-case ele_info column,
    #  Chinese label) for the five aggregation dimensions.
    dims = [
        ('use_unit', 'use_unit_code', 'USE_UNIT_CODE', '使用单位'),
        ('make_unit', 'make_unit_name', 'MAKE_UNIT_NAME', '制造单位'),
        ('set_unit', 'set_unit_name', 'SET_UNIT_NAME', '安装单位'),
        ('insp_org', 'insp_org_name', 'INSP_ORG_NAME', '检验机构'),
        ('wb_unit', 'wb_unit_name', 'WB_UNIT_NAME', '维保单位'),
    ]

    # Row filter shared by every fault aggregation query: drop records whose
    # unit identifiers are missing or placeholder junk.
    common_filter = (
        "use_unit_code is not null and use_unit_code != '-' and use_unit_code != '不详' "
        "and make_unit_name is not null and make_unit_name != '-' and make_unit_name != '/' "
        "and set_unit_name is not null and set_unit_name != '-' and set_unit_name != '/' "
        "and insp_org_name is not null and insp_org_name != '-' and insp_org_name != '/' "
        "and wb_unit_name is not null and wb_unit_name != '/' and wb_unit_name != '*' "
        "and wb_unit_name != '0' and wb_unit_name != '//' and wb_unit_name != '-' "
        "and wb_unit_name != '--' and wb_unit_name != '**' and wb_unit_name != '1'"
    )
    # Per-dimension filters used when counting elevators in ele_info.
    # NOTE(review): the use_unit filter has no `is not null` test, matching
    # the original behavior -- confirm whether that is intentional.
    ele_filters = {
        'use_unit': "USE_UNIT_CODE != '-' and USE_UNIT_CODE != '不详'",
        'make_unit': "MAKE_UNIT_NAME is not null and MAKE_UNIT_NAME != '-' and MAKE_UNIT_NAME != '/'",
        'set_unit': "SET_UNIT_NAME is not null and SET_UNIT_NAME != '-' and SET_UNIT_NAME != '/'",
        'insp_org': "INSP_ORG_NAME is not null and INSP_ORG_NAME != '-' and INSP_ORG_NAME != '/'",
        'wb_unit': ("WB_UNIT_NAME is not null and WB_UNIT_NAME != '/' and WB_UNIT_NAME != '*' "
                    "and WB_UNIT_NAME != '0' and WB_UNIT_NAME != '//' and WB_UNIT_NAME != '-' "
                    "and WB_UNIT_NAME != '--' and WB_UNIT_NAME != '**' and WB_UNIT_NAME != '1'"),
    }

    try:
        # ---------------- elevator counts per unit ----------------
        for prefix, _, upper, _ in dims:
            table = 'dt_yc.model_' + prefix + '_ele_num'
            run('DROP TABLE IF EXISTS ' + table)
            run('CREATE TABLE ' + table + ' SELECT ' + upper + ', COUNT(*) ELE_NUM '
                'FROM dt_yc.ele_info WHERE ' + ele_filters[prefix] +
                ' GROUP BY ' + upper)
            run('CREATE INDEX index_' + upper.lower() + ' ON ' + table + '(`' + upper + '`)')
        report('电梯数量统计完成,运行时间:')

        # ---------------- fault counts per unit per month ----------------
        for prefix, _, upper, _ in dims:
            table = 'dt_yc.model_' + prefix + '_fault_num'
            run('DROP TABLE IF EXISTS ' + table)
            run('CREATE TABLE ' + table + '(' + upper + ' varchar(50), MONTH_NUM int, FAULT_NUM int)')
            run('CREATE INDEX ' + prefix + '_fault_num_index ON ' + table + '(' + upper + ', MONTH_NUM)')

        current_date = get_current_date()
        if current_date is not None:
            # end_date is exclusive; the six 30-day windows are walked backwards.
            end_date = get_previous_diff_date(current_date, -1)
            for month_num in range(1, 7):
                start_date = get_previous_diff_date(end_date, 30)
                for prefix, lower, _, _ in dims:
                    query = (
                        'SELECT ' + lower + ', count(*) fault_num FROM dt_yc.zt_dt_fault '
                        "WHERE form_create_time >= DATE_FORMAT(%s,'%%Y-%%m-%%d') "
                        "and form_create_time < DATE_FORMAT(%s,'%%Y-%%m-%%d') "
                        'and ' + common_filter + ' GROUP BY ' + lower
                    )
                    cursor.execute(query, [start_date, end_date])
                    insert = 'INSERT INTO dt_yc.model_' + prefix + '_fault_num VALUES(%s, %s, %s)'
                    for name, fault_num in cursor.fetchall():
                        if name is None:
                            continue
                        cursor.execute(insert, [name, month_num, fault_num])
                    conn.commit()
                end_date = start_date
                print('第' + str(month_num) + '个月数据处理完成')
        report('故障电梯数量统计完毕,运行时间:')

        # ---------------- load elevator counts into memory ----------------
        # ele_num_rows[prefix] holds (unit name, elevator count) tuples.
        ele_num_rows = {}
        for prefix, _, _, _ in dims:
            cursor.execute('SELECT * FROM dt_yc.model_' + prefix + '_ele_num')
            ele_num_rows[prefix] = cursor.fetchall()
        report('电梯数量信息读取完毕,运行时间:')

        # ---------------- fault-rate tables ----------------
        rate_columns = ', '.join('FAULT_RATE_MONTH_%d decimal(10, 4)' % m
                                 for m in range(1, 7))
        for prefix, lower, upper, _ in dims:
            table = 'dt_yc.model_' + prefix + '_fault_rate'
            run('DROP TABLE IF EXISTS ' + table)
            run('CREATE TABLE ' + table + '(' + upper + ' varchar(50), ' + rate_columns + ')')
            run('CREATE INDEX ' + prefix + '_fault_rate_index ON ' + table + '(' + lower + ')')

        # ---------------- load fault counts into memory ----------------
        # fault_num_maps[prefix] maps unit name -> {month_num: fault_num}.
        fault_num_maps = {}
        for prefix, _, _, _ in dims:
            mapping = {}
            cursor.execute('SELECT * FROM dt_yc.model_' + prefix + '_fault_num')
            for name, month_num, fault_num in cursor.fetchall():
                mapping.setdefault(name, {})[month_num] = fault_num
            fault_num_maps[prefix] = mapping
        report('电梯故障信息读取完毕,运行时间:')

        # ---------------- compute and store fault rates ----------------
        for prefix, _, _, label in dims:
            insert = ('INSERT INTO dt_yc.model_' + prefix +
                      '_fault_rate VALUES(%s, %s, %s, %s, %s, %s, %s)')
            fault_map = fault_num_maps[prefix]
            for name, ele_num in ele_num_rows[prefix]:
                fault_info = fault_map.get(name)
                rates = [
                    round((fault_info.get(m, 0) if fault_info else 0) / ele_num, 4)
                    for m in range(1, 7)
                ]
                # Skip units with no faults in any of the six windows.
                if not any(rates):
                    continue
                cursor.execute(insert, [name] + rates)
                conn.commit()
            report(label + '故障率计算完毕,运行时间:')
    finally:
        # Always release the DB resources, even if a query fails.
        cursor.close()
        conn.close()

    elapsed = time.time() - start
    print('使用、制造、安装、维保单位与检验机构故障率统计完毕,总运行时间:' +
          str(round(elapsed / 60, 2)) + '分钟')
    print('------------------------------------------------------')
    print('------------------------------------------------------')
config.SPOTIFY_REDIRECT_URI, config.SPOTIFY_SCOPE) if not spotify_auth.main(): sys.exit("Refresh token could not taken.") # api spotify_api_obj = SpotifyAPI(config.SPOTIFY_CLIENT_ID, config.SPOTIFY_CLIENT_SECRET, config.SPOTIFY_USER, spotify_auth.refresh_token) # radio stations radio_stations_obj = RadioStations() # playlist_date = get_current_date() playlist_id = False temporary_added_tracks = [] while True: # token if spotify_api_obj.is_token_expired(): spotify_api_obj.refresh_access_token() # playlist current_date = get_current_date() if (not playlist_id) or (playlist_date != current_date): playlist_date = current_date playlist_name = f'{config.PLAYLIST_NAME} - {playlist_date}' playlist_id = spotify_api_obj.is_playlist_exist(playlist_name) if not playlist_id:
ds1_mapping = utils.read_mode_file(dsFile1) if os.path.samefile(dsFile1, dsFile2): ds2_mapping = ds1_mapping else: ds2_mapping = utils.read_mode_file(dsFile2) add_header = True if os.path.isfile(outFNm): add_header = False with open(outFNm, 'a') as equivF: if add_header: equivF.write('# Equivalence table for mode %s\n' % mode_name) equivF.write('# File generated on: %s\n' % utils.get_current_date()) equivF.write('# snap_nid_1\tsnap_nid_2\n') if inFNm is not None: with open(inFNm, 'r') as inF: for line in inF: if line[0] == '#' or line[0] == '\n': continue vals = utils.split_then_strip(line, '\t') id1 = vals[ds1Idx] id2 = vals[ds2Idx] if id1 == '' or id2 == '': continue if args.skip_missing_ids and (id1 not in ds1_mapping or id2 not in ds2_mapping): continue equivF.write('%d\t%d\n' % (ds1_mapping[id1], ds2_mapping[id2])) else:
def get_aggrs(threshold=0.5):
    """Aggregate today's high-risk elevator predictions into dt_yc.

    Reads the prediction CSV for the current date, selects elevators whose
    predicted fault probability is at least ``threshold``, then stores two
    aggregations:

      * ``model_use_months_aggr`` -- counts of high-risk elevators per
        50-month usage-age bucket (the last bucket is 300+ months),
      * ``model_apply_loc_aggr`` -- counts of high-risk elevators per
        application location.

    :param threshold: minimum predicted probability (inclusive) for an
        elevator to count as high-risk; defaults to 0.5.
    """
    conn = pymysql.connect(host='10.214.163.179', user='******', password='******',
                           port=3306, database='dt_yc')
    cursor = conn.cursor()
    try:
        current_date = get_current_date()
        # Read today's prediction results; `with` fixes the original leak of
        # the CSV file handle.
        with open('results/' + str(current_date) + '-result.csv', 'r') as csv_file:
            rows = list(csv.reader(csv_file))
        # Elevators predicted high-risk (header row skipped).
        high_risk_tranid_set = [
            row[0] for row in rows
            if row[0] != 'tranid' and float(row[1]) >= threshold
        ]

        # Pull location and start-of-use date for every elevator with
        # complete unit information.
        query = """
            SELECT apply_location, use_start_date, tranid
            FROM dt_yc.ele_info
            WHERE use_start_date is not null
            AND use_unit_code is not null AND use_unit_code != '-' AND use_unit_code != '不详'
            AND make_unit_name is not null AND make_unit_name != '-' AND make_unit_name != '/'
            AND set_unit_name is not null AND set_unit_name != '-' AND set_unit_name != '/'
            AND insp_org_name is not null AND insp_org_name != '-' AND insp_org_name != '/'
            AND wb_unit_name is not null AND wb_unit_name != '/' AND wb_unit_name != '*'
            AND wb_unit_name != '0' AND wb_unit_name != '//' AND wb_unit_name != '-'
            AND wb_unit_name != '--' AND wb_unit_name != '**' AND wb_unit_name != '1'
        """
        cursor.execute(query)
        # tranid -> [apply_location, use_months]
        ele_info = {}
        for apply_location, use_start_date, tranid in cursor.fetchall():
            use_months = get_use_months(current_date, str(use_start_date))
            ele_info[tranid] = [apply_location, use_months]

        # Regional aggregation is not possible yet (data missing).
        # Usage-age histogram: [0,50), [50,100), ..., [250,300), [300,inf).
        use_months_aggr = [0, 0, 0, 0, 0, 0, 0]
        for tranid in high_risk_tranid_set:
            info = ele_info.get(tranid)
            if not info:
                continue
            use_months = info[1]
            # Comparison chains instead of `x in range(a, b)` so non-integer
            # month values are bucketed as well rather than silently dropped.
            if 0 <= use_months < 50:
                use_months_aggr[0] += 1
            elif 50 <= use_months < 100:
                use_months_aggr[1] += 1
            elif 100 <= use_months < 150:
                use_months_aggr[2] += 1
            elif 150 <= use_months < 200:
                use_months_aggr[3] += 1
            elif 200 <= use_months < 250:
                use_months_aggr[4] += 1
            elif 250 <= use_months < 300:
                use_months_aggr[5] += 1
            elif use_months >= 300:
                use_months_aggr[6] += 1
        # Store the histogram; the last bucket's upper bound is a sentinel.
        for i, amount in enumerate(use_months_aggr):
            lower_use_month = i * 50
            upper_use_month = 100000000 if i == 6 else (i + 1) * 50
            insert = ("INSERT INTO dt_yc.model_use_months_aggr "
                      "VALUES(DATE_FORMAT(%s, '%%Y-%%m-%%d'), %s, %s, %s)")
            cursor.execute(insert, [current_date, lower_use_month, upper_use_month, amount])
            conn.commit()
        print('高危电梯使用月数信息聚合完毕')

        # Count high-risk elevators per application location.
        apply_loc_aggr = {}
        for tranid in high_risk_tranid_set:
            info = ele_info.get(tranid)
            if not info:
                continue
            apply_location = info[0]
            apply_loc_aggr[apply_location] = apply_loc_aggr.get(apply_location, 0) + 1
        for apply_location, amount in apply_loc_aggr.items():
            insert = ("INSERT INTO dt_yc.model_apply_loc_aggr "
                      "VALUES(DATE_FORMAT(%s, '%%Y-%%m-%%d'), %s, %s)")
            cursor.execute(insert, [current_date, apply_location, amount])
            conn.commit()
        print('高危电梯投用场所信息聚合完毕')
        print('高危电梯信息聚合完毕')
        print('------------------------------------------------------')
        print('------------------------------------------------------')
    finally:
        # Always release the DB resources, even if a query fails.
        cursor.close()
        conn.close()