def search_status_pcap():
    status = 0
    db_proxy = DbProxy(CONFIG_DB_NAME)
    sql_search = "SELECT * from pcap_down_status;"
    result, rows = db_proxy.read_db(sql_search)
    flag = rows[0][1]
    pcap_name = rows[0][2]
    pcap_orig_size = rows[0][3]
    pcap_orig_time = rows[0][4]
    pcap_cur_size = rows[0][5]
    pcap_cur_time = rows[0][6]
    if flag == 1:
        status = 1
        data = {
            'status': status,
            'pcap_name': pcap_name,
            'pcap_orig_size': pcap_orig_size,
            'pcap_orig_time': pcap_orig_time,
            'pcap_cur_size': pcap_cur_size,
            'pcap_cur_time': pcap_cur_time
        }
        return jsonify(data)
    else:
        return jsonify({'status': status})
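# The column indices above imply a pcap_down_status row shaped roughly as
# (id, flag, pcap_name, pcap_orig_size, pcap_orig_time, pcap_cur_size,
# pcap_cur_time, ...); this layout is inferred from the code, not from a
# schema dump. A minimal guard for an empty table could look like:
#
#     if not rows:
#         return jsonify({'status': 0})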
def get_all_switch_mac_port():
    db_proxy = DbProxy(CONFIG_DB_NAME)
    sql_str = ("select snmp_version,group_name,security_level,security_name,"
               "auth_mode,auth_pwd,priv_mode,priv_pwd,ssh_name,ssh_pwd,"
               "ip,name from switch_info")
    res, rows = db_proxy.read_db(sql_str)
    if len(rows) > 0:
        for row in rows:
            autoGetSwitchInfo.get_one_switch_mac_port(row)
def creat_smtpflowdatas_tables():
    db = DbProxy()
    try:
        sql_str = """CREATE TABLE `smtpflowdatas` (
            `smtpFlowdataId` int(11) NOT NULL AUTO_INCREMENT,
            `flowdataHeadId` int(11) DEFAULT NULL,
            `souceMailAddress` longtext,
            `destMailAddress` longtext,
            `MailTitle` longtext,
            `protocolDetail` int(11) DEFAULT NULL,
            `packetLenth` longtext,
            `packetTimestamp` int(11),
            `createdAt` longtext,
            `flowTimestamp` varchar(30),
            `tableNum` int(5) DEFAULT NULL,
            `direction` int(11) DEFAULT NULL,
            `packetTimestampint` bigint DEFAULT NULL,
            PRIMARY KEY (`smtpFlowdataId`),
            index `time_index` (`flowTimestamp`),
            index `head_index` (`flowdataHeadId`),
            index `packet_time_index` (`packetTimestamp`),
            index `packet_time_int_index` (`packetTimestampint`)
        ) ENGINE=MERGE UNION=(smtpflowdatas_0,smtpflowdatas_1,smtpflowdatas_2,smtpflowdatas_3,smtpflowdatas_4,smtpflowdatas_5,smtpflowdatas_6,smtpflowdatas_7,smtpflowdatas_8,smtpflowdatas_9) INSERT_METHOD=NO AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;"""
        db.write_db(sql_str)
        logger.info("create table smtpflowdatas success.")
    except:
        logger.error("create table smtpflowdatas error.")
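# The MERGE table above is a read-only view over the ten MyISAM shard
# tables smtpflowdatas_0 .. smtpflowdatas_9 (INSERT_METHOD=NO rejects
# direct inserts). The shards must already exist with an identical column
# layout, or reads through the MERGE table fail. Writers are therefore
# expected to target a shard directly; a minimal sketch, assuming rows are
# spread by a tableNum in 0..9 (the actual shard-selection rule is not
# shown in this file):
#
#     table_num = packet_ts % 10  # hypothetical shard-selection rule
#     db.write_db("INSERT INTO smtpflowdatas_%d (...) VALUES (...)" % table_num)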
def creat_gooseflowdatas_tables():
    db = DbProxy()
    try:
        sql_str = """CREATE TABLE `gooseflowdatas` (
            `gooseFlowdataId` int(11) NOT NULL AUTO_INCREMENT,
            `flowdataHeadId` int(11) DEFAULT NULL,
            `datSet` longtext,
            `goID` longtext,
            `allData` longtext,
            `packetLenth` int(11) DEFAULT NULL,
            `packetTimestamp` int(11),
            `createdAt` longtext,
            `flowTimestamp` varchar(30),
            `tableNum` int(5) DEFAULT NULL,
            `direction` int(11) DEFAULT NULL,
            `packetTimestampint` bigint DEFAULT NULL,
            PRIMARY KEY (`gooseFlowdataId`),
            index `time_index` (`flowTimestamp`),
            index `head_index` (`flowdataHeadId`),
            index `packet_time_index` (`packetTimestamp`),
            index `packet_time_int_index` (`packetTimestampint`)
        ) ENGINE=MERGE UNION=(gooseflowdatas_0,gooseflowdatas_1,gooseflowdatas_2,gooseflowdatas_3,gooseflowdatas_4,gooseflowdatas_5,gooseflowdatas_6,gooseflowdatas_7,gooseflowdatas_8,gooseflowdatas_9) INSERT_METHOD=NO AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;"""
        db.write_db(sql_str)
        logger.info("add_gooseflowdatas_tables success.")
    except:
        logger.error("add_gooseflowdatas_tables error.")
def add_s7plusflowdatas_tables():
    db = DbProxy()
    try:
        # Create the ten MyISAM shard tables first; the MERGE table that
        # unions them is created afterwards by create_s7plusflowdatas_tables().
        for table_id in range(0, 10):
            sql_str = """CREATE TABLE `s7plusflowdatas_%d` (
                `s7plusFlowdataId` int(11) NOT NULL AUTO_INCREMENT,
                `flowdataHeadId` int(11) DEFAULT NULL,
                `opcode` longtext,
                `funcCode` longtext,
                `reserved1` longtext,
                `reserved2` longtext,
                `reserved3` longtext,
                `reserved4` longtext,
                `reserved5` longtext,
                `reserved6` longtext,
                `packetLenth` int(11) DEFAULT NULL,
                `packetTimestamp` int(11),
                `createdAt` longtext,
                `flowTimestamp` varchar(30),
                `tableNum` int(5) DEFAULT 0,
                `direction` int(11) DEFAULT NULL,
                `packetTimestampint` bigint DEFAULT NULL,
                PRIMARY KEY (`s7plusFlowdataId`),
                index `time_index` (`flowTimestamp`),
                index `packet_time_index` (`packetTimestamp`)
            ) ENGINE=MYISAM DEFAULT CHARSET=utf8;""" % (table_id)
            db.write_db(sql_str)
        MysqlDbUpgradeC.create_s7plusflowdatas_tables()
        logger.info("add_s7plusflowdatas_tables success.")
    except:
        logger.error("add_s7plusflowdatas_tables error.")
def create_s7plusflowdatas_tables():
    db = DbProxy()
    try:
        sql_str = """CREATE TABLE `s7plusflowdatas` (
            `s7plusFlowdataId` int(11) NOT NULL AUTO_INCREMENT,
            `flowdataHeadId` int(11) DEFAULT NULL,
            `opcode` longtext,
            `funcCode` longtext,
            `reserved1` longtext,
            `reserved2` longtext,
            `reserved3` longtext,
            `reserved4` longtext,
            `reserved5` longtext,
            `reserved6` longtext,
            `packetLenth` int(11) DEFAULT NULL,
            `packetTimestamp` int(11),
            `createdAt` longtext,
            `flowTimestamp` varchar(30),
            `tableNum` int(5) DEFAULT NULL,
            `direction` int(11) DEFAULT NULL,
            `packetTimestampint` bigint DEFAULT NULL,
            PRIMARY KEY (`s7plusFlowdataId`),
            index `time_index` (`flowTimestamp`),
            index `packet_time_index` (`packetTimestamp`)
        ) ENGINE=MERGE UNION=(s7plusflowdatas_0,s7plusflowdatas_1,s7plusflowdatas_2,s7plusflowdatas_3,s7plusflowdatas_4,s7plusflowdatas_5,s7plusflowdatas_6,s7plusflowdatas_7,s7plusflowdatas_8,s7plusflowdatas_9) INSERT_METHOD=NO AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;"""
        db.write_db(sql_str)
        logger.info("add table s7plusflowdatas success.")
    except:
        logger.error("add table s7plusflowdatas error.")
def drop_pwdaging_table():
    db = DbProxy()
    try:
        sql_str = "DROP TABLE IF EXISTS `pwd_aging`"
        db.write_db(sql_str)
    except:
        logger.error("drop_pwdaging_table error.")
def license_info():
    """Return the machine code used for license generation."""
    msg = {}
    userip = get_oper_ip_info(request)
    loginuser = request.args.get('loginuser')
    msg['UserName'] = loginuser
    msg['UserIP'] = userip
    msg['ManageStyle'] = 'WEB'
    msg['Operate'] = u'license授权,获取机器码'
    msg['Result'] = '1'
    # Fetch the machine code from the underlying layer.
    try:
        license_key = os.popen('mc_gen').read()
        update_str = "update license_info set license_key = '%s'" % str(license_key)
        db_proxy = DbProxy(CONFIG_DB_NAME)
        res = db_proxy.write_db(update_str)
        msg['Result'] = '0'
        send_log_db(MODULE_OPERATION, msg)
        return jsonify({'status': 1, "license_key": license_key})
    except:
        send_log_db(MODULE_OPERATION, msg)
        return jsonify({'status': 0, "msg": "获取机器码出错! "})
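# os.popen swallows the exit status of mc_gen, so a failed run yields an
# empty license_key that is still written to the database. A minimal sketch
# of a stricter variant using the stdlib subprocess module (an alternative,
# not what this codebase does):
#
#     import subprocess
#     license_key = subprocess.check_output(['mc_gen']).strip()
#     # check_output raises CalledProcessError on a non-zero exit status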
def alter_dev_his_traffic():
    db = DbProxy()
    try:
        sql_str = "alter table dev_his_traffic modify ip varchar(64);"
        db.write_db(sql_str)
    except:
        logger.error("alter table dev_his_traffic modify ip error.")
def alter_dev_band_20s():
    db = DbProxy()
    try:
        sql_str = "alter table dev_band_20s modify devip varchar(64);"
        db.write_db(sql_str)
    except:
        logger.error("alter table dev_band_20s modify devip error.")
def dropsmtpflowdatas_tables():
    db = DbProxy()
    try:
        sql_str = "DROP TABLE IF EXISTS `smtpflowdatas`"
        db.write_db(sql_str)
    except:
        logger.error("drop table smtpflowdatas error.")
def mw_get_switch_info():
    if request.method == 'POST':
        db_proxy = DbProxy(CONFIG_DB_NAME)
        post_data = request.get_json()
        page = post_data.get('page', 0)
        mac = post_data.get('mac', '').encode('utf-8')
        sql_str = ("select a.name,a.locate, b.port, b.mac from switch_info as a "
                   "inner join switch_mac_port as b "
                   "where a.name in (select b.switch_name from switch_mac_port")
        num_str = ("select count(*) from switch_info as a "
                   "inner join switch_mac_port as b "
                   "where a.name in (select b.switch_name from switch_mac_port")
        if mac:
            sql_str += " where b.mac like '%%%s%%'" % mac
            num_str += " where b.mac like '%%%s%%'" % mac
        sql_str += ")"
        num_str += ")"
        if page:
            limit_str = ' order by a.id desc limit ' + str((page - 1) * 10) + ',10;'
            sql_str += limit_str
        res, rows = db_proxy.read_db(sql_str)
        row_list = []
        # Build the result rows.
        if len(rows) > 0:
            for row in rows:
                row_list.append(row)
        # Total row count.
        res, count = db_proxy.read_db(num_str)
        total = count[0][0]
        return jsonify({'rows': row_list, 'num': total, 'page': page})
def safeevent_refresh():
    """Example of how to send server generated events to clients."""
    db = DbProxy()
    while True:
        try:
            sql_str = "select count(*) from incidents"
            res, rows = db.read_db(sql_str)
            if rows:
                total = rows[0][0]
                app.logger.info("old total: " + str(total))
            socketio.sleep(5)
            sql_str = "select count(*) from incidents"
            res, rows = db.read_db(sql_str)
            if rows:
                total1 = rows[0][0]
                app.logger.info("sql total: " + str(total1))
                if int(total) != int(total1):
                    if int(total1) != 0:
                        # diff = int(total1) - int(total)
                        # msg = u"安全事件有更新, 数量" + str(diff) + u"条"
                        msg = {'msg_type': 1, 'data': "new_safeevent"}
                        socketio.emit('data_refresh', msg)
            else:
                app.logger.error("read select count(*) from incidents error.")
        except:
            app.logger.error("safeevent socket error.")
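# safeevent_refresh loops forever, so it is presumably launched once at
# startup as a Flask-SocketIO background task; the exact call site is not
# part of this file, but it would typically look like:
#
#     socketio.start_background_task(safeevent_refresh)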
def del_traffictables():
    keystoneconfig_table = ('safedevpoint', 'icdevicetrafficstats',
                            'sorted_dev_traffic_1h', 'dev_his_traffic',
                            'dev_band_20s', 'icdevicetraffics')
    db_proxy = DbProxy()
    for t in keystoneconfig_table:
        sql_str = 'DROP TABLE IF EXISTS %s' % t
        db_proxy.write_db(sql_str)
def alter_sorted_dev_traffic_1h():
    db = DbProxy()
    try:
        MysqlDbUpgradeC.truncate_traffic_tables()
        sql_str = "alter table sorted_dev_traffic_1h modify ip varchar(64);"
        db.write_db(sql_str)
    except:
        logger.error("alter table sorted_dev_traffic_1h modify ip error.")
def get_mac_port_by_ssh(row):
    db_proxy = DbProxy(CONFIG_DB_NAME)
    ssh_name = row[8]
    ssh_pwd = row[9]
    ip = row[10]
    switch_name = row[11]
    if os.path.exists(SWITCH_UPLOAD_FOLDER + "get_mac_port_by_ssh.expect"):
        os.system("rm -rf " + SWITCH_UPLOAD_FOLDER + "get_mac_port_by_ssh.expect")
    # Copy the template file; a freshly created file would lose the execute permission.
    os.system("cp " + SWITCH_UPLOAD_FOLDER + "ssh_template.expect " +
              SWITCH_UPLOAD_FOLDER + "get_mac_port_by_ssh.expect")
    # Generate the mac_port collection script from the template.
    new_ssh_name = ssh_name + "@" + str(ip)
    new_ssh_pwd = '{ send "' + ssh_pwd + '\n" }'
    with open(SWITCH_UPLOAD_FOLDER + 'get_mac_port_by_ssh.expect', 'w+') as new_file:
        with open(SWITCH_UPLOAD_FOLDER + 'ssh_template.expect', 'r+') as old_file:
            for line in old_file:
                if '[email protected]' in line:
                    line = line.replace('[email protected]', new_ssh_name)
                if '{ send "123456\n" }' in line:
                    line = line.replace('{ send "123456\n" }', new_ssh_pwd)
                new_file.write(line)
    # Run the expect script to produce the raw mac_port data file.
    cmd = ("./data/switchinfo/get_mac_port_by_ssh.expect"
           " | sed -e 's/^ .*16D//' -e '/^[ \\r<]/d'"
           " | sed -n '/MAC/,$p'"
           " | awk '{print $1,$4}' >" + SWITCH_UPLOAD_FOLDER + "ssh_mac_port")
    logger.info(cmd)
    os.system(cmd)
    # Clean the data and store it in the database.
    logger.info(os.path.getsize(SWITCH_UPLOAD_FOLDER + "ssh_mac_port"))
    if os.path.getsize(SWITCH_UPLOAD_FOLDER + "ssh_mac_port") > 0:
        # Read the collected results and write them to the database.
        with open(SWITCH_UPLOAD_FOLDER + "ssh_mac_port", 'r+') as f:
            del_str = "delete from switch_mac_port where switch_name='{}'".format(switch_name)
            logger.info(del_str)
            db_proxy.write_db(del_str)
            for line in f.readlines():
                if not line.startswith("MAC"):  # skip the header line
                    # e.g. "0050-56c0-0003 GigabitEthernet1/0/24"
                    tmp_mac = line[0:14].replace("-", "")
                    mac = re.sub(r"(?<=\w)(?=(?:\w\w)+$)", ":", tmp_mac)
                    port = line.split(" ")[1].strip()
                    insert_sql = ("insert into switch_mac_port (switch_name, mac, port)"
                                  " values ('{}','{}','{}')").format(switch_name, mac, port)
                    # logger.info(insert_sql)
                    res = db_proxy.write_db(insert_sql)
    # Remove the generated script and data file.
    os.system("rm -rf " + SWITCH_UPLOAD_FOLDER + "get_mac_port_by_ssh.expect")
    os.system("rm -rf " + SWITCH_UPLOAD_FOLDER + "ssh_mac_port")
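# The re.sub above inserts ':' wherever an even number of characters
# remains to the right, turning switch-style MACs into colon notation.
# A quick check of the transformation (illustrative values):
#
#     tmp_mac = "0050-56c0-0003".replace("-", "")    # -> "005056c00003"
#     re.sub(r"(?<=\w)(?=(?:\w\w)+$)", ":", tmp_mac)  # -> "00:50:56:c0:00:03"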
def del_keystonetable():
    keystoneconfig_table = ('customrules', 'definedprotocol', 'dev_band_threshold',
                            'ipmaclist', 'rules', 'signatures', 'vulnerabilities',
                            'whiteliststudy', 'nsm_flowfull', 'nsm_flowaudit',
                            'nsm_flowftpinfo', 'nsm_dbgcollect', 'nsm_sysbackupinfo',
                            'nsm_confsave', 'nsm_rptalarminfo', 'nsm_rptprotoinfo',
                            'nsm_rptloginfo', 'auditstrategy', 'nsm_raidstatus',
                            'dev_name_conf')
    db_proxy = DbProxy()
    for t in keystoneconfig_table:
        sql_str = 'DROP TABLE IF EXISTS %s' % t
        db_proxy.write_db(sql_str)
def add_signatures():
    db = DbProxy()
    try:
        sql_str = """INSERT INTO `signatures` VALUES
            (NULL,'1065387488247104870',0,'LiMJJmW3x/LdsJOZvMYukHIBYQak1nzx102tJGXMNUDgfGNF40EyDl5J9JmxF7NsBpZKr6IFFJHz8r2WAMU1qK3KK4N1pw/dBm6KOLQ0VP6oL67CIipz4x+tCZpIBjy3PW+1GmDfmmVXj0+acxDTRnbAsZlvzCUqv1E1Yvi6B3wEHQktniTKALvPhqQfr5lJNFqzawVpcDvveCUCm23fRYVQzUeN889+TEL/KNvGzdbS6a41UGxVNCiN3x5MRLrzdH64SvAC9AQFNjJ4bKwTb57tskyBfoWGijwSdTIGpdF3U8n6VbahOFKQqT5TH3ZWkpXY5Q9PaGSpHjPJ18jeNdhJmIjertf35pOGSJZ8tylDyyfR37mJF+vNm8M3s2fGl8r3p5UrRJmJcgZOqrfKlQ==',3,101116,'ICS-ATTACK.TRISIS/TRITON READ REQUEST','ICS-ATTACK.TRISIS/TRITON READ REQUEST',NULL,1,NULL,0,2,0,12),
            (NULL,'2875134312413836186',0,'LiMJJmW3x/LdsJOZvMYukHIBYQak1nzx102tJGXMNUDgfGNF40EyDl5J9JmxF7NsBpZKr6IFFJHz8r2WAMU1qN0zcWz7KbMrk6eYHGF++7B4mp/+lvz35M/1T+Lplr4Q3eEUJBTyq/3+QNQzjTAj7VLsUDLov4bCRzs5zo1L/26jmMooJqW5+BkT1b83U+GbMkqtTuRWexaC8nQG3RsUgNvPWMTh4LTHGoYa5bMP5W9n71GyfB7BziLHcQwOj5PDx4j7QdU2KmASQVDjNR8dzMb6huXPkACFXpt47dU4SGOGRSZU0AWmQC808b6EvzqY9rwttlyS+bkH2IBwT6G9agsotxPm6jTBAP55qLqo2cRA+JsZFvASuJ3VI0ThL04Kzr4MQYCunvHnLASX2+LbLw==',3,101117,'ICS-ATTACK.TRISIS/TRITON WRITE REQUEST','ICS-ATTACK.TRISIS/TRITON WRITE REQUEST',NULL,1,NULL,0,2,0,12),
            (NULL,'1591739768215462186',0,'LiMJJmW3x/LdsJOZvMYukHIBYQak1nzx102tJGXMNUDgfGNF40EyDl5J9JmxF7NsBpZKr6IFFJHz8r2WAMU1qBsu9Nb5vziRGqv+WMrmHj1uBer3K+Qha5uxkUzawkeEFS0mrcG20QCUE8cvG/FAos8q+QV3JGNbgY5RL27K6QcN95k6W3k3OhIqUgC7moLOqdmtq5TtirkP8PEntwyoc2TzitH7tBz8pfdmWacfKumsPKcS26IjmUTRGt42T+p/vQCPq4TdKExZXCzkeQF7RjTqYiMa2EoocH6AbHvzl3b7tWJ4n/uPGDRm2FvFVASIPijPoeIdrFIiic6a5FfEknuZPIPf0L8wx5+je/mc4jZiomDxYiSdGBeWNJhkG7epaf6xERn23xazC9PZ1yKbXdfSoXa2V6qQhl5N1imfqj/Kwcq+ALnOWgH179m7e07btXbGvc4zzucEza/d2Gr7Nqz0v1l0W8PavzocYmnVlOPfuStHNgB0sQlYtdVTXsD7',3,101118,'ICS-ATTACK.TRISIS/TRITON EXECUTE REQUEST','ICS-ATTACK.TRISIS/TRITON EXECUTE REQUEST',NULL,1,NULL,0,2,0,12);"""
        res = db.write_db(sql_str)
        if res == 0:
            logger.info("add_signatures ok.")
        else:
            logger.error("add_signatures write_db error.")
    except:
        logger.error("add_signatures error.")
def add_vulnerabilities():
    db = DbProxy()
    try:
        sql_str = """INSERT INTO `vulnerabilities` values
            ('1591739768215462186',1,'code_execution','ICS-ATTACK.TRISIS/TRITON EXECUTE REQUEST',NULL,'施耐德电气Triconex Tricon MP3008存在漏洞。恶意软件可以在攻陷SIS系统后,对SIS系统逻辑进行重编程,使SIS系统产生意外动作,对正常生产活动造成影响;或是造成SIS系统失效,在发生安全隐患或安全风险时无法及时实行和启动安全保护机制;亦或在攻陷SIS系统后,对DCS系统实施攻击,并通过SIS系统与DCS系统的联合作用,对工业设备、生产活动以及操作人员的人身安全造成巨大威胁。',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,'2017-12-14',101118,0),
            ('1065387488247104870',1,'code_execution','ICS-ATTACK.TRISIS/TRITON READ REQUEST',NULL,'施耐德电气Triconex Tricon MP3008存在漏洞。恶意软件可以在攻陷SIS系统后,对SIS系统逻辑进行重编程,使SIS系统产生意外动作,对正常生产活动造成影响;或是造成SIS系统失效,在发生安全隐患或安全风险时无法及时实行和启动安全保护机制;亦或在攻陷SIS系统后,对DCS系统实施攻击,并通过SIS系统与DCS系统的联合作用,对工业设备、生产活动以及操作人员的人身安全造成巨大威胁。',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,'2017-12-14',101116,0),
            ('2875134312413836186',1,'code_execution','ICS-ATTACK.TRISIS/TRITON WRITE REQUEST',NULL,'施耐德电气Triconex Tricon MP3008存在漏洞。恶意软件可以在攻陷SIS系统后,对SIS系统逻辑进行重编程,使SIS系统产生意外动作,对正常生产活动造成影响;或是造成SIS系统失效,在发生安全隐患或安全风险时无法及时实行和启动安全保护机制;亦或在攻陷SIS系统后,对DCS系统实施攻击,并通过SIS系统与DCS系统的联合作用,对工业设备、生产活动以及操作人员的人身安全造成巨大威胁。',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,'2017-12-14',101117,0);"""
        res = db.write_db(sql_str)
        if res == 0:
            logger.info("add_vulnerabilities ok.")
        else:
            logger.error("add_vulnerabilities write_db error.")
    except:
        logger.error("add_vulnerabilities error.")
def add_goose_alldata_column():
    db = DbProxy()
    try:
        # Drop the MERGE table before the shard layout changes, then
        # recreate it so its definition matches the new columns.
        MysqlDbUpgradeC.drop_gooseflowdatas_tables()
        for table_id in range(0, 10):
            sql_str = ("alter table gooseflowdatas_%d add column allData longtext"
                       " null default null after goID;" % table_id)
            db.write_db(sql_str)
        logger.info("add_goose_alldata_column write_db success.")
        MysqlDbUpgradeC.creat_gooseflowdatas_tables()
    except:
        logger.error("add_goose_alldata_column error.")
def add_pop3_mail_title():
    db = DbProxy()
    try:
        MysqlDbUpgradeC.droppop3flowdatas_tables()
        for table_id in range(0, 10):
            sql_str = ("alter table pop3flowdatas_%d add column MailTitle longtext"
                       " null default null after destMailAddress;" % table_id)
            db.write_db(sql_str)
        logger.info("add_pop3_mail_title write_db success.")
        MysqlDbUpgradeC.creat_pop3flowdatas_tables()
    except:
        logger.error("add_pop3_mail_title error.")
def add_macfilter_table():
    db = DbProxy()
    try:
        sql_str = """CREATE TABLE `mac_filter` (
            `id` int not null primary key auto_increment,
            `mac` varchar(32),
            `enable` int(6) DEFAULT 0
        ) ENGINE=MYISAM DEFAULT CHARSET=utf8;"""
        db.write_db(sql_str)
        logger.info("add table macfilter success.")
    except:
        logger.error("add table macfilter error.")
def alter_table_users():
    """
    Standalone-login feature: add a last-update timestamp to the users table.
    :return:
    """
    alter_sql = "alter table users add column last_update varchar(64) default null after editAt"
    db_config = DbProxy("keystone_config")
    try:
        db_config.write_db(alter_sql)
        logger.info("alter_table_users ok.")
    except:
        logger.error(traceback.format_exc())
        logger.error("alter_table_users error.")
def alter_table_top_config():
    """
    After an upgrade, default the asset alarm check period to five minutes.
    :return:
    """
    alter_sql = "update top_config set check_time=5 where check_time=1"
    db_config = DbProxy("keystone_config")
    try:
        db_config.write_db(alter_sql)
        logger.info("alter_table_top_config ok.")
    except:
        logger.error(traceback.format_exc())
        logger.error("alter_table_top_config error.")
def add_asset_report_tables():
    """
    Asset reports: create the asset report tables.
    :return:
    """
    cre_str = """CREATE TABLE `nsm_rptassetsinfo` (
        `id` int not null primary key auto_increment,
        `report_name` longtext,
        `report_time` longtext,
        `report_freq` int,
        `report_raw_file` longtext,
        `report_statics_file` longtext
    ) ENGINE=MYISAM DEFAULT CHARSET=utf8"""
    cre_str1 = """CREATE TABLE `nsm_rptassetsmode` (
        `flag` int(11) NOT NULL
    ) ENGINE=MYISAM DEFAULT CHARSET=utf8"""
    add_str = "INSERT INTO `nsm_rptassetsmode` VALUES (2)"
    db_config = DbProxy("keystone_config")
    try:
        db_config.write_db(cre_str)
        db_config.write_db(cre_str1)
        db_config.write_db(add_str)
        logger.info("add_asset_report_tables ok.")
    except:
        logger.error(traceback.format_exc())
        logger.error("add_asset_report_tables error.")
def license_status_verify():
    license_res = str(os.popen('lic_verify /data/licensetmpfolder/licensefile.lic').read())
    parts = license_res.split("-")
    license_legal = int(parts[0])
    license_time = parts[1]
    license_func = parts[2]
    # The uploaded file is legitimate: persist the verified status.
    if license_legal == 1:
        update_str = ("update license_info set license_legal = '%s',"
                      "license_time = '%s',license_func = '%s'" % (
                          license_legal, license_time, license_func))
        db_proxy = DbProxy(CONFIG_DB_NAME)
        res = db_proxy.write_db(update_str)
        return license_legal, license_time, license_func
    else:
        return 0, 0, 0
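# lic_verify is assumed to print "<legal>-<time>-<func>" (for example
# "1-20261231-3"); this format is inferred from the parsing above, not
# from the external binary's documentation.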
def start_pcap():
    if request.method == 'POST':
        result = request.get_json()
        pcap_name = result['pcap_name']
        pcap_orig_size = result['pcap_orig_size']
        pcap_orig_time = result['pcap_orig_time']
        # Audit log entry.
        msg = {}
        userip = get_oper_ip_info(request)
        data = request.get_json()
        loginuser = data.get('loginuser', '')
        msg['UserName'] = loginuser
        msg['UserIP'] = userip
        msg['ManageStyle'] = 'WEB'
        msg['Operate'] = u'开始在线抓包'
        # Reject duplicate file names.
        db_proxy = DbProxy(CONFIG_DB_NAME)
        sql_str = "select pcap_name from pcap_down_data"
        sql_count = "select count(*) from pcap_down_data "
        res, name_result = db_proxy.read_db(sql_str)
        pcap_name = pcap_name + '.pcap'
        for name in name_result:
            if pcap_name == name[0]:
                return jsonify({'status': 0, 'msg': '文件名重复'})
            else:
                continue
        # Cap the number of stored captures at 10.
        res, count_rows = db_proxy.read_db(sql_count)
        total = count_rows[0][0]
        if int(total) >= 10:
            return jsonify({'status': 0, 'msg': '文件总数超过10条'})
        else:
            pcap_start_time = int(time.time())
            sql_str = ("update pcap_down_status set flag=1,pcap_name='%s',"
                       "pcap_orig_size='%s',pcap_orig_time='%s',pcap_start_time='%s';" % (
                           pcap_name, pcap_orig_size, pcap_orig_time, pcap_start_time))
            res = db_proxy.write_db(sql_str)
            if res == 0:
                msg['Result'] = '0'
                send_log_db(MODULE_OPERATION, msg)
                return jsonify({'status': 1, 'msg': '开始抓包成功'})
            else:
                msg['Result'] = '1'
                send_log_db(MODULE_OPERATION, msg)
                return jsonify({'status': 0, 'msg': '开始抓包失败'})
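# start_pcap only flips flag=1 in pcap_down_status; the capture itself is
# presumably performed by a separate component that polls this table (that
# component is not part of this file). search_status_pcap above reads the
# same row to report capture progress to the client.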
def mw_export_switch_info():
    db_proxy = DbProxy(CONFIG_DB_NAME)
    loginuser = request.args.get('loginuser')
    msg = {}
    userip = get_oper_ip_info(request)
    msg['UserIP'] = userip
    msg['UserName'] = loginuser
    msg['ManageStyle'] = 'WEB'
    try:
        # Create the directory first so the initial export does not fail.
        if not os.path.exists(SWITCH_UPLOAD_FOLDER):
            os.makedirs(SWITCH_UPLOAD_FOLDER)
        os.system('rm /data/switchinfo/switch_list.csv')
        csvfile = open('/data/switchinfo/switch_list.csv', 'wb')
        csvfile.write(codecs.BOM_UTF8)
        writer = csv.writer(csvfile, dialect='excel')
        writer.writerow([
            '交换机名称', 'IP', '类型', '位置', 'SNMP版本', '团体名', '安全等级',
            '安全用户名', '认证方式', '认证密码', '加密方式', '加密密码',
            'ssh用户名', 'ssh密码'
        ])
        sql_str = ("select name,ip, type, locate,snmp_version,group_name,"
                   "security_level,security_name,auth_mode,auth_pwd,priv_mode,"
                   "priv_pwd,ssh_name,ssh_pwd from switch_info order by id desc")
        result, rows = db_proxy.read_db(sql_str)
        for row in rows:
            row = list(row)
            row[6] = SECURITY_LEVEL_DICT[str(row[6])]
            row[4] = SNMP_VERSION_DICT[str(row[4])]
            # Strip the default v3-only fields when exporting.
            if not row[7]:
                row[4] = ""
                row[6] = ""
                row[8] = ""
                row[10] = ""
            writer.writerow(row)
        csvfile.close()
        msg['Operate'] = u"导出switch_list"
        msg['Result'] = '0'
        send_log_db(MODULE_OPERATION, msg)
        return send_from_directory(SWITCH_UPLOAD_FOLDER, "switch_list.csv",
                                   as_attachment=True)
    except:
        current_app.logger.error(traceback.format_exc())
        msg['Operate'] = u"导出switch_list"
        msg['Result'] = '1'
        send_log_db(MODULE_OPERATION, msg)
        status = 0
        return jsonify({'status': status})
def search_license_status():
    """Check whether the license is authorized."""
    # After a factory reset, restore the authorized state.
    if os.path.exists(LICENSE_RECOVER_ADDR):
        update_str = "update license_info set license_legal = 1,license_time = '0',license_func = '0'"
        db_proxy = DbProxy(CONFIG_DB_NAME)
        db_proxy.write_db(update_str)
        os.system('rm -rf /data/licsystemreset')
    search_str = "select license_legal, license_time, license_func from license_info"
    db_proxy = DbProxy(CONFIG_DB_NAME)
    res, rows = db_proxy.read_db(search_str)
    try:
        license_legal = int(rows[0][0])
        license_time = rows[0][1]
        license_func = rows[0][2]
    except:
        return jsonify({'status': 0, "msg": "获取license授权状态错误!"})
    if license_legal == 1:
        # Remove the uploaded license file.
        os.system('rm -rf /data/licensetmpfolder/licensefile.lic')
        return jsonify({
            'status': 1,
            "license_time": license_time,
            "license_func": license_func
        })
    else:
        return jsonify({'status': 0, "msg": "license未授权!"})
def detail_pcap():
    db_proxy = DbProxy(CONFIG_DB_NAME)
    sql_str = "SELECT * from pcap_down_data order by finish_time desc"
    sql_count = "select count(*) from pcap_down_data "
    result, rows = db_proxy.read_db(sql_str)
    rows = list(rows)
    pcap_detail = list()
    for i in rows:
        detail = {}
        detail["id"] = i[0]
        detail["pcap_name"] = i[2]
        detail["pcap_cur_size"] = i[3]
        detail["pcap_cur_time"] = i[4]
        pcap_detail.append(detail)
    res, count_rows = db_proxy.read_db(sql_count)
    total = count_rows[0][0]
    return jsonify({'rows': pcap_detail, 'total': total})