def update_last_check(db, alertType, url, webId, statusCode):
    try:
        cur = db.alert.find({
            "_type": url,
            "_index": setting.agentName,
            "type": alertType,
            "status_code": statusCode,
            "web_id": webId
        }).sort('update_at', -1)
        try:
            res = list(cur)[0]
            if (time.time() - int(res['insert_time'])) <= 60 * 60 * 24:
                db.alert.update({
                    "_type": url,
                    "_index": setting.agentName,
                    "type": alertType,
                    "status_code": statusCode,
                    "web_id": webId,
                    "update_at": res['update_at']
                }, {"$set": {
                    "update_at": time.time()
                }}, upsert=False, multi=False)
        except:
            pass
    except Exception:
        error.catchError(traceback.format_exc())
    return
def take_shot(url, name, statusCode, id):
    try:
        driver = setting.webdriver.PhantomJS(service_args=[
            '--ignore-ssl-errors=true', '--ssl-protocol=any', '--web-security=false'])
        driver.set_window_size(1280, 720)
        driver.set_page_load_timeout(150)
        try:
            # setting.threadLock.acquire()
            # Build the screenshot path in a separate variable so that "name"
            # is not overwritten and reused as a path prefix on later iterations.
            if isinstance(id, int):
                path = ("/var/www/html/web/app/alert_image/" + name + "_" + str(id) + "_" +
                        str(statusCode) + "_" + str(setting.date.day) + "-" +
                        str(setting.date.month) + "-" + str(setting.date.year) + ".jpg")
                if not os.path.isfile(path):
                    driver.get(url)
                    # setting.driver.execute_script('document.body.style.background = "white"')
                    driver.save_screenshot(path)
            else:
                path = ("/var/www/html/web/app/alert_image/" + name + "_" + str(id[0][0]) + "_" +
                        str(statusCode) + "_" + str(setting.date.day) + "-" +
                        str(setting.date.month) + "-" + str(setting.date.year) + ".jpg")
                if not os.path.isfile(path):
                    driver.get(url)
                    for each in id:
                        # setting.driver.execute_script('document.body.style.background = "white"')
                        path = ("/var/www/html/web/app/alert_image/" + name + "_" + str(each[0]) + "_" +
                                str(statusCode) + "_" + str(setting.date.day) + "-" +
                                str(setting.date.month) + "-" + str(setting.date.year) + ".jpg")
                        driver.save_screenshot(path)
            driver.close()
            driver.quit()
            # setting.threadLock.release()
            return True
        except Exception:
            driver.close()
            driver.quit()
            # setting.threadLock.release()
            error.catchError(traceback.format_exc())
            return False
    except:
        # PhantomJS failed to start or hung: kill any stray phantomjs processes.
        for proc in psutil.process_iter():
            if proc.as_dict(attrs=['pid', 'name'])['name'] == 'phantomjs':
                proc.kill()
        return False
def check_status(db, alertType, url, webId):
    try:
        cur = db.alert.find({
            "_type": url,
            "_index": setting.agentName,
            "type": alertType,
            "web_id": webId
        }).sort('update_at', -1)
        try:
            res = list(cur)[0]
            # Close the alert's open event window ("thời điểm hiện tại" = "the present time")
            # by stamping it with the current timestamp.
            if res['events'][len(res['events']) - 1]['end_at'] == u"thời điểm hiện tại":
                res['events'][len(res['events']) - 1]['end_at'] = str(datetime.now().replace(microsecond=0)).replace(' ', 'T')
                db.alert.update({
                    "_type": url,
                    "_index": setting.agentName,
                    "type": alertType,
                    "web_id": webId,
                    "update_at": res['update_at']
                }, {"$set": {
                    "events": res['events'],
                    "update_at": time.time()
                }}, upsert=False)
        except:
            pass
    except Exception:
        error.catchError(traceback.format_exc())
    return
def ip_alert(oldIp, newIp, hostName, webId, db):
    setting.listHostIp[hostName] = 1
    try:
        cur = db.alert.find({
            "_type": hostName,
            "_index": setting.agentName,
            "type": "ipchanged",
            "web_id": webId,
            "new_ip": newIp
        }).sort('update_at', -1)
        try:
            res = list(cur)[0]
        except:
            # No existing alert for this new IP: create one.
            alert = {
                '_type': hostName,
                '_index': setting.agentName,
                'type': "ipchanged",
                'name': "Địa chỉ IP đã bị thay đổi",  # "The IP address has changed"
                'is_alerted': 0,
                'false_positive': 0,
                'severity': 2,
                'old_ip': oldIp,
                'web_id': webId,
                'events': [],
                'insert_time': int(time.time()),
                'new_ip': newIp,
                'host_name': hostName
            }
            temp = {}
            temp['start_at'] = str(datetime.now().replace(microsecond=0)).replace(' ', 'T')
            temp['end_at'] = 'thời điểm hiện tại'  # open-ended: "the present time"
            alert['events'].append(temp)
            alert['update_at'] = time.time()
            setting.db.alert.insert_one(alert)
        else:
            if res['false_positive'] == 1:
                # The alert was marked as a false positive: treat the new IP as legitimate
                # and append it to the tracked IP list.
                print "DETECTED wrong ip alert"
                if newIp not in oldIp:
                    oldIp = oldIp + ';' + newIp
                    tmp = "UPDATE trackWebsite SET ip = '%s' WHERE id = '%d'" % (oldIp, webId)
                    setting.MySQLUpdate.append(tmp)
                return True
            else:
                setting.db.alert.update({
                    "_type": hostName,
                    "_index": setting.agentName,
                    "type": "ipchanged",
                    "insert_time": res['insert_time'],
                    "web_id": webId
                }, {"$set": {
                    "update_at": time.time()
                }}, upsert=False)
                return False
    except Exception:
        error.catchError(traceback.format_exc())
    return
def insert_event(statusCode, hostName, timecheck, cursor, timeResponse, ipAddress, userId, webId):
    sql = ""
    try:
        if statusCode < 400:
            status = "OK (%d)" % statusCode
            event = "Up"
        else:
            status = "FALSE (%d)" % statusCode
            event = "Down"
        if timeResponse > 10000:
            status = "OK (Slow Connection)"
        sql = "SELECT * FROM events WHERE webId = %d ORDER BY time DESC" % (webId)
        cursor.execute(sql)
        result = cursor.fetchall()
        if len(result) == 0:
            # First record for this website: seed a "Started" event plus the current one.
            sql = "INSERT INTO events (event, hostName, reason, time, duration, userId, webId) VALUES ('%s', '%s' , '%s', '%d', '%s', '%d', '%d')" % (
                "Started", hostName, "OK", timecheck, "0:0:0", userId, webId)
            cursor.execute(sql)
            sql = "INSERT INTO events (event, hostName, reason, time, duration, lastCheck, userId, webId) VALUES ('%s', '%s' , '%s', '%d', '%s', '%d', '%d', '%d')" % (
                event, hostName, status, timecheck + 1, "0:0:0", timecheck + 1, userId, webId)
            cursor.execute(sql)
            statistics(hostName, statusCode, 0, cursor, timeResponse, ipAddress, userId, webId)
        else:
            triggerEvent = result[0][1]
            triggerHostName = result[0][2]
            triggerReason = result[0][3]
            triggerTime = result[0][4]
            triggerLast = result[0][6]
            statistics(hostName, statusCode, int(timecheck - triggerLast), cursor, timeResponse, ipAddress, userId, webId)
            flag = 1
            if statusCode == 408:
                # Suppress repeated timeout events within a 5-minute window.
                if time.time() - setting.alert408[webId] < 300:
                    flag = 0
                elif status != triggerReason:
                    timecheck = timecheck - 12 * 60 * 500
            if flag == 1:
                # (calculate_duration is not defined in this section; see the illustrative sketch below.)
                duration = calculate_duration(triggerTime, timecheck)
                sql = " UPDATE events SET duration = '%s', lastCheck = %d WHERE time = %d AND webId = %d" % (
                    duration, timecheck, triggerTime, webId)
                cursor.execute(sql)
                if status != triggerReason:
                    # Status changed since the last event: open a new event row.
                    sql = "INSERT INTO events (event, hostName, reason, time, duration, lastCheck, userId, webId) VALUES ('%s', '%s' , '%s', '%d', '%s', '%d', '%d', '%d')" % (
                        event, hostName, status, timecheck, "0:0:0", timecheck, userId, webId)
                    cursor.execute(sql)
    except Exception:
        error.catchError(traceback.format_exc(), sql)
    return
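# NOTE: calculate_duration() is called by insert_event() above but is not defined in
# this section. The helper below is only a minimal illustrative sketch of the assumed
# contract: both arguments are millisecond timestamps (timecheck is time.time() * 1000)
# and the result is stored in the "duration" column in the same "H:M:S" form as the
# "0:0:0" default. It is an assumption, not the original implementation.
def calculate_duration(startMs, endMs):
    seconds = max(int((endMs - startMs) / 1000), 0)
    hours = seconds / 3600
    minutes = (seconds % 3600) / 60
    return "%d:%d:%d" % (hours, minutes, seconds % 60)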
def thread_insert_data_MySQL():
    # global actions, MySQLEvent, MySQLQuery, MongoQuery
    # client = MongoClient()
    # db = client['webix']
    openData = MySQLdb.connect('localhost', 'root', 'vnistadmin', 'webix')
    MySQLCur = openData.cursor()
    cnt = 0
    query = ""
    while 1:
        print str(time.ctime()) + " Checking Data MySQL"
        try:
            if len(setting.MySQLUpdate) > 0 or len(setting.MySQLEvent) > 0:
                print str(time.ctime()) + " Inserting Data MySQL"
                # Drain the queued raw UPDATE/INSERT statements first.
                while len(setting.MySQLUpdate) > 0:
                    query = setting.MySQLUpdate[0]
                    MySQLCur.execute(query)
                    setting.MySQLUpdate.pop(0)
                    cnt += 1
                # Then drain the queued monitoring events.
                while len(setting.MySQLEvent) > 0:
                    data = setting.MySQLEvent[0]
                    events.insert_event(data[0], data[1], data[2], MySQLCur, data[3], data[4], data[5], data[6])
                    setting.MySQLEvent.pop(0)
                    cnt += 1
                openData.commit()
            if cnt > 0:
                print str(time.ctime()) + " Inserted Data MYSQL"
                cnt = 0
            if setting.helpstat == 1:
                f = open("help.txt", "a")
                f.write("Done MySQL\n")
                f.close()
            time.sleep(60)
        except KeyboardInterrupt:
            print "Cancel Inserting Data"
            return
        except:
            error.catchError(traceback.format_exc(), query)
            print "Error while inserting MySQL Data"
            # Reconnect and retry on the next cycle.
            openData = MySQLdb.connect('localhost', 'root', 'vnistadmin', 'webix')
            MySQLCur = openData.cursor()
            time.sleep(60)
            pass
def thread_insert_data_ElasticSearch():
    times = 0
    while 1:
        print str(time.ctime()) + " Checking Data ES"
        try:
            if len(setting.ESData) > 0:
                print str(time.ctime()) + " Inserting Data ES"
                while len(setting.ESData) > 0:
                    try:
                        # print setting.ESData
                        # Bulk-index in chunks of 100 actions.
                        while len(setting.ESData) > 100:
                            tmp = setting.ESData[0:100]
                            helpers.bulk(setting.es, tmp)
                            for i in range(0, 100):
                                setting.ESData.pop(0)
                        helpers.bulk(setting.es, setting.ESData)
                    except:
                        times += 1
                        if times > 3:
                            # Too many failures: recreate the client and give up for this cycle.
                            setting.es = Elasticsearch()
                            break
                        print traceback.format_exc()
                        print "Failed. Trying Again"
                        pass
                    else:
                        del setting.ESData[:]
                print str(time.ctime()) + " Inserted Data ElasticSearch"
                times = 0
            if setting.helpstat == 1:
                f = open("help.txt", "a")
                f.write("Done ES\n")
                f.close()
            time.sleep(60)
        except KeyboardInterrupt:
            print "Cancel Inserting Data ElasticSearch"
            return
        except:
            error.catchError(traceback.format_exc())
            print "Error while inserting ElasticSearch"
            return
def thread_insert_data_MongoDb():
    cnt = 0
    while 1:
        print str(time.ctime()) + " Checking Data MongoDb"
        try:
            if len(setting.MongoData) > 0:
                print str(time.ctime()) + " Inserting Data MongoDb"
                while len(setting.MongoData) > 0:
                    data = setting.MongoData[0]
                    POT = alert.insert_alert(setting.db, data[0], data[1], data[2], data[3], data[4],
                                             data[5], data[6], data[7], data[8], data[9], data[10])
                    if POT == False:
                        # The alert matched a known false positive: close the defacement
                        # event and force the website structure to be re-learned.
                        alert.check_status(setting.db, 'defaced', data[3], data[6])
                        sql = "UPDATE webStruct SET isStructed = 0 where webId = '%d'" % data[6]
                        setting.MySQLUpdate.append(sql)
                    setting.MongoData.pop(0)
                    cnt += 1
            if cnt > 0:
                print str(time.ctime()) + " Inserted Data MongoDb"
                cnt = 0
            if setting.helpstat == 1:
                f = open("help.txt", "a")
                f.write("Done Mongo\n")
                f.close()
            time.sleep(60)
        except KeyboardInterrupt:
            print "Cancel Inserting Data MongoDb"
            return
        except:
            error.catchError(traceback.format_exc())
            print "Error while inserting MongoDb"
            setting.client = MongoClient()
            setting.db = setting.client['webix']
            time.sleep(60)
            pass
def statistics(hostName, statusCode, time, cursor, timeResponse, ipAddress, userId, webId, timeOK=0):
    sql = ""
    try:
        if statusCode < 400:
            timeOK = time
        sql = "SELECT timeOK, timeTOTAL, averageTime, times FROM statistic WHERE webId = %d" % (webId)
        cursor.execute(sql)
        result = cursor.fetchall()
        if len(result) == 0:
            sql = "INSERT INTO statistic (hostName, timeOK, timeTOTAL, averageTime, ip, userId, webId, times) VALUES ('%s','%d', '%d', '%f', '%s', '%d', '%d', 1)" % (
                hostName, 0, 0, timeResponse, ipAddress, userId, webId)
            cursor.execute(sql)
        else:
            timeOK = result[0][0] + timeOK
            averageTime = float(result[0][2])
            timeTOTAL = result[0][1] + time
            times = int(result[0][3])
            if statusCode < 400:
                # Rolling average of the response time over successful checks.
                averageTime = (averageTime * times + timeResponse) / (times + 1)
                times += 1
            sql = "UPDATE statistic SET timeOK = %d, timeTOTAL = %d, averageTime = '%f', times = %d WHERE webId = %d" % (
                timeOK, timeTOTAL, averageTime, times, webId)
            cursor.execute(sql)
    except Exception:
        error.catchError(traceback.format_exc(), sql)
    return
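# The statistic table above accumulates timeOK (time spent with status < 400) and
# timeTOTAL (all monitored time) per website. The helper below is a minimal sketch,
# not part of the original code, showing how an availability percentage could be
# derived from such a row; it assumes both columns use the same time unit.
def uptime_percent(timeOK, timeTOTAL):
    if timeTOTAL <= 0:
        return 100.0
    return 100.0 * timeOK / timeTOTAL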
def check(self, url, name, ipAddress, userId, webId, prevStatus):
    sql = ""
    statusCode = 0
    state = 0
    timeAVG = 0
    warning = ""
    severity = 0
    timecheck = (time.time()) * 1000
    flag = 1
    fail = 0
    lastTime = time.time()
    try:
        # print "%d - dong 88: - %s" % (self.id, hostName)
        for loop in range(2):
            try:
                r = requests.get(url, allow_redirects=True, headers=setting.headers,
                                 verify=False, timeout=20)
            except:
                # print "%d - dong 94: - %s" % (self.id, hostName)
                if fail == 0:
                    fail += 1
                else:
                    # print "%s : Fail to check " % datetime.now(), link
                    warning = "Request Timeout"
                    timeAVG = 0.0
                    statusCode = 408
                    state = 0
                    link = None
                    severity = 2
                    for web in webId:
                        try:
                            lastTime = setting.alert408[web[0]]
                        except KeyError:
                            setting.alert408[web[0]] = time.time()
                            continue
                # print "%d - dong 111: - %s" % (self.id, hostName)
            else:
                # print "%d - dong 113: - %s" % (self.id, hostName)
                for web in webId:
                    try:
                        del setting.alert408[web[0]]
                        del setting.tmpMongoData[web[0]]
                    except KeyError:
                        # for web in webId:
                        #     print web[0]
                        # print setting.alert408
                        continue
                # print "%d - dong 119: - %s" % (self.id, hostName)
                timeAVG = r.elapsed.total_seconds() * 1000
                if r.status_code == 200:
                    state = 1
                    if timeAVG > 10000:
                        warning = "Slow Connection"
                        severity = 1
                    else:
                        # print "%d - dong 128: - %s" % (self.id, hostName)
                        for web in webId:
                            alert.check_status(setting.db, "performance", url, web[0])
                        # print "%d - dong 131: - %s" % (self.id, hostName)
                        warning = "Normal"
                    i = 0
                    # print str(time.ctime()) + " : Checking deface " + hostName
                    # print "%d - dong 135: - %s" % (self.id, hostName)
                    for user in userId:
                        deface.check_deface(r.content, url, name, ipAddress, webId[i][0], user[0])
                        i += 1
                    # print "%d - dong 139: - %s" % (self.id, hostName)
                elif r.status_code > 399 and r.status_code < 500:
                    warning = "Client Error"
                    timeAVG = 0.0
                    severity = 2
                    # print str(time.ctime()) + " : Taking pic " + hostName
                    # print "%d - dong 145: - %s" % (self.id, hostName)
                    alert.take_shot(url, name, r.status_code, webId)
                    # print "%d - dong 148: - %s" % (self.id, hostName)
                    state = 0
                    flag = 0
                    # print str(time.ctime()) + " : Done " + hostName
                elif r.status_code >= 500:
                    warning = "Internal Server Error"
                    timeAVG = 0.0
                    severity = 3
                    # print str(time.ctime()) + " : Taking pic " + hostName
                    # print "%d - dong 158: - %s" % (self.id, hostName)
                    alert.take_shot(url, name, r.status_code, webId)
                    # print "%d - dong 161: - %s" % (self.id, hostName)
                    state = 0
                    # print str(time.ctime()) + " : Done " + hostName
                    flag = 0
                statusCode = r.status_code
                break
        i = 0
        # print "%d - dong 168: - %s" % (self.id, hostName)
        for user in userId:
            action = {
                "_index": "user-%d" % user[0],
                "_type": "web-%d" % webId[i][0],
                "_id": timecheck,
                "_source": {
                    'name': name,
                    'ip': ipAddress,
                    'state': state,
                    'time_res': timeAVG,
                    'status_code': statusCode,
                    'warning': warning,
                    'time': timecheck,
                    'user_id': user[0],
                    'web_id': webId[i][0]
                }
            }
            i += 1
            setting.ESData.append(action)
        # print "%d - dong 189: - %s" % (self.id, hostName)
        i = 0
        if statusCode == 408:
            if time.time() - lastTime > 300:
                # The site has been timing out for more than 5 minutes: raise the alert.
                flag = 0
                state = 0
                print time.time() - lastTime
            else:
                # Buffer the pending alert data until the timeout persists long enough.
                for web in webId:
                    try:
                        setting.tmpMongoData[web[0]]
                        print "Adding more Query of id: %d" % (web[0])
                    except KeyError:
                        print "Creating TMP Query for 408 Error of %s" % (name)
                        setting.tmpMongoData[web[0]] = []
                        continue
                i = 0
                for web in webId:
                    tmpMongo = ('performance', severity, ipAddress, name, timeAVG, statusCode,
                                web[0], url, None, None,
                                str(datetime.now().replace(microsecond=0)).replace(' ', 'T'))
                    setting.tmpMongoData[web[0]].append(tmpMongo)
                # print "%d - dong 226: - %s" % (self.id, hostName)
        # print "%d - dong 232: - %s" % (self.id, hostName)
        for user in userId:
            tmpSQL = (statusCode, url, timecheck, timeAVG, ipAddress, user[0], webId[i][0])
            setting.MySQLEvent.append(tmpSQL)
            i += 1
        if flag == 0:
            # Flush any buffered 408 data plus the current alert to the MongoDB queue.
            for web in webId:
                try:
                    setting.MongoData += setting.tmpMongoData[web[0]]
                    del setting.tmpMongoData[web[0]]
                except KeyError:
                    continue
            for web in webId:
                tmpMongo = ('performance', severity, ipAddress, name, timeAVG, statusCode,
                            web[0], url, None, None,
                            str(datetime.now().replace(microsecond=0)).replace(' ', 'T'))
                setting.MongoData.append(tmpMongo)
        # print "%d - dong 244: - %s" % (self.id, hostName)
        if prevStatus != state:
            sql = "UPDATE trackWebsite SET status = %d WHERE url = '%s'" % (state, url)
            setting.MySQLUpdate.append(sql)
        # print "End: %d - dong 248: - %s" % (self.id, hostName)
    except Exception:
        error.catchError(traceback.format_exc())
    return
def get_mysql_data():
    connect = MySQLdb.connect('localhost', 'root', 'vnistadmin', 'webix')
    cur = connect.cursor()
    # Start the background writer threads (MySQL, MongoDB, Elasticsearch) and the watchdog.
    threadMySQL = Thread(target=thread_insert_data_MySQL, args=())
    threadMySQL.start()
    threadMongoDb = Thread(target=thread_insert_data_MongoDb, args=())
    threadMongoDb.start()
    threadElasticSearch = Thread(target=thread_insert_data_ElasticSearch, args=())
    threadElasticSearch.start()
    threadRestart = Thread(target=force_to_restart, args=())
    threadRestart.start()
    print threading.enumerate()
    cur.execute("SELECT object, struct, webId FROM webStruct")
    res_obj = cur.fetchall()
    for each in res_obj:
        setting.listObject[each[2]] = each
    while 1:
        setting.date = datetime.now()
        setting.exitFlag = 0
        try:
            threadworker = []
            with connect:
                try:
                    cur.execute("SELECT limitTime, isStructed, webId FROM webStruct")
                    res_web = cur.fetchall()
                    for each in res_web:
                        setting.listWebData[each[2]] = each
                    cur.execute("SELECT learningTime, id FROM trackWebsite")
                    res_id = cur.fetchall()
                    for each in res_id:
                        setting.listLearnTime[each[1]] = each[0]
                    cur.execute("SELECT DISTINCT name,url,status, ip FROM trackWebsite")
                    result_Host = cur.fetchall()
                    print "Processing " + str(len(result_Host)) + " websites"
                    index = 1
                    for each in result_Host:
                        # print "Index: %d - Domain: %s" % (index, each[1])
                        index += 1
                        hostName = each[1]
                        temp = {}
                        temp['status'] = each[2]
                        temp['ip'] = each[3]
                        temp['url'] = hostName
                        res = urlparse(hostName)
                        # if "http://" in hostName:
                        #     hostName = hostName.replace("http://", "")
                        # if "https://" in hostName:
                        #     hostName = hostName.replace("https://", "")
                        temp['name'] = each[0]
                        temp['hostName'] = res.netloc
                        sql = "SELECT userId from trackWebsite WHERE url = '%s'" % (each[1])
                        cur.execute(sql)
                        res = cur.fetchall()
                        temp['userId'] = res
                        sql = "SELECT id from trackWebsite WHERE url = '%s'" % (each[1])
                        cur.execute(sql)
                        res = cur.fetchall()
                        temp['id'] = res
                        setting.queue.put(temp)
                        # print temp
                    connect.commit()
                except:
                    error.catchError(traceback.format_exc())
                    print "QUERYING DATABASE ERROR"
                    sys.exit()
            startTime = time.time()
            qsize = setting.queue.qsize()
            print "Creating 100 Thread"
            for i in range(0, 100):
                threadworker.append(worker.worker(i))
                threadworker[i].start()
            while not setting.queue.empty():
                time.sleep(3)
                print "%f s -------- %d s ------ queue size: %d -------- active thread: %d" % (
                    time.time() - startTime, 20 * qsize, setting.queue.qsize(), threading.active_count())
                if time.time() - startTime > 20 * qsize:
                    # The round has taken too long: drop whatever is left in the queue.
                    while not setting.queue.empty():
                        setting.queue.get()
                    break
                pass
            setting.exitFlag = 1
            time.sleep(30)
            print "Killing all"
            setting.startTime = time.time()
            while threading.active_count() > 7:
                for x in threadworker:
                    x.join(0.5)
                    Thread._Thread__stop(x)
            setting.startTime = 0
            print "All threads are closed!"
            print "active thread: %d" % (threading.active_count())
            print threading.enumerate()
            time.sleep(150)
        except KeyboardInterrupt:
            print "\nYou pressed Ctrl + C\nProgram terminated"
            setting.exitFlag = 1
            connect.close()
            for t in threadworker:
                t.join(10)
            Thread._Thread__stop(threadMySQL)
            Thread._Thread__stop(threadMongoDb)
            Thread._Thread__stop(threadElasticSearch)
            Thread._Thread__stop(threadRestart)
            sys.exit()
        except Exception:
            error.catchError(traceback.format_exc())
            connect.close()
            sys.exit()
def insert_alert(db, alertType, severity, des_ip, hostName, timeResponse, statusCode, webId, url, diff, suspicious, startTime):
    try:
        image_path = ""
        cur = db.alert.find({
            "_type": url,
            "_index": setting.agentName,
            "type": alertType,
            "web_id": webId
        }).sort('update_at', -1)
        flag = 0
        try:
            res = list(cur)[0]
        except:
            flag = 1
        else:
            if (res['status_code'] != statusCode) or (res['type'] == 'defaced' and check_diff(diff, res['deface_at'])):
                # The latest alert no longer matches the current state: close its open
                # event window ("thời điểm hiện tại" = "the present time") ...
                if res['events'][len(res["events"]) - 1]['end_at'] == u'thời điểm hiện tại':
                    res['events'][len(res['events']) - 1]['end_at'] = str(datetime.now().replace(microsecond=0)).replace(' ', 'T')
                    db.alert.update({
                        "_type": url,
                        "_index": setting.agentName,
                        "status_code": res['status_code'],
                        "insert_time": res['insert_time'],
                        "severity": res['severity'],
                        "web_id": webId,
                    }, {"$set": {
                        "events": res['events'],
                        "update_at": time.time()
                    }}, upsert=False)
                    time.sleep(1)
                # ... then look for an alert that matches the current state exactly.
                cur = db.alert.find({
                    "_type": url,
                    "_index": setting.agentName,
                    "type": alertType,
                    "status_code": statusCode,
                    "severity": severity,
                    "web_id": webId,
                    'deface_at': diff,
                    'dangerous_word': suspicious
                }).sort('update_at', -1)
                try:
                    res = list(cur)[0]
                except:
                    flag = 1
        if flag == 0:
            if res['false_positive'] == 1 and res['type'] == "defaced" and not check_diff(res['deface_at'], diff):
                print "Detect FALSE ALERT"
                return False
            elif (time.time() - int(res['insert_time'])) > 60 * 60 * 24:
                # The matching alert is older than 24 hours: raise a fresh one.
                flag = 1
            elif res['events'][len(res['events']) - 1]['end_at'] != u"thời điểm hiện tại" and res['false_positive'] == 0:
                # Re-open the alert with a new event window.
                temp = {"start_at": startTime, "end_at": "thời điểm hiện tại"}
                res['events'].append(temp)
                db.alert.update({
                    "_type": url,
                    "_index": setting.agentName,
                    "type": alertType,
                    "status_code": statusCode,
                    "insert_time": res['insert_time'],
                    "severity": res['severity'],
                    "web_id": webId
                }, {"$set": {
                    "events": res['events'],
                    "update_at": time.time()
                }}, upsert=False)
            else:
                update_last_check(db, alertType, url, webId, statusCode)
        if flag == 1:
            if statusCode != 408 and statusCode != 200:
                image_path = (hostName + "_" + str(webId) + "_" + str(statusCode) + "_" +
                              str(setting.date.day) + "-" + str(setting.date.month) + "-" +
                              str(setting.date.year) + ".jpg")
                flag_image = os.path.isfile("/var/www/html/web/app/alert_image/" + image_path)
                if flag_image == False:
                    flag_image = take_shot(url, hostName, statusCode, int(webId))
                if flag_image == False:
                    image_path = ""
            cur = db.alert.find({
                "_type": url,
                "_index": setting.agentName,
                "type": alertType,
                "severity": severity,
                "web_id": webId
            }).sort('update_at', -1)
            try:
                res = list(cur)[0]
                if res['events'][len(res['events']) - 1]['end_at'] == u'thời điểm hiện tại':
                    res['events'][len(res['events']) - 1]['end_at'] = str(datetime.now().replace(microsecond=0)).replace(' ', 'T')
                    db.alert.update({
                        "_type": url,
                        "_index": setting.agentName,
                        "type": alertType,
                        "insert_time": res['insert_time'],
                        "severity": res['severity'],
                        "web_id": webId
                    }, {"$set": {
                        "events": res['events'],
                        "update_at": time.time()
                    }}, upsert=False)
                    time.sleep(1)
            except:
                pass
            # (describe_referer is not defined in this section; an illustrative sketch follows this function.)
            describe, referer, alertName = describe_referer(alertType, statusCode, severity)
            # if severity == 3 and alertType == 'defaced':
            #     alertName = "Lỗi DEFACED - Có sự thay đổi trong cấu trúc website"
            #     ("DEFACED error - the website structure has changed")
            # if severity == 4 and alertType == 'defaced':
            #     alertName = "Lỗi DEFACED - Phát hiện từ nghi vấn trong cấu trúc website"
            #     ("DEFACED error - a suspicious word was found in the website structure")
            alert = {
                '_type': url,
                '_index': setting.agentName,
                'type': alertType,
                'name': alertName,
                'is_alerted': 0,
                'false_positive': 0,
                'severity': severity,
                'dst_ip': des_ip,
                'time_response': timeResponse,
                'status_code': statusCode,
                'description': describe,
                'reference': referer,
                'web_id': webId,
                'host_name': hostName,
                'image_name': image_path,
                'events': [],
                'insert_time': int(time.time()),
                'deface_at': diff,
                'dangerous_word': suspicious
            }
            temp = {}
            temp['start_at'] = startTime
            temp['end_at'] = 'thời điểm hiện tại'  # open-ended: "the present time"
            alert['events'].append(temp)
            alert['update_at'] = time.time()
            db.alert.insert_one(alert)
        return True
    except Exception:
        error.catchError(traceback.format_exc())
    return
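# NOTE: describe_referer() is called by insert_alert() above but is not defined in this
# section. The sketch below only illustrates the assumed contract — it returns a
# (description, reference, alert name) triple for the given alert type, status code and
# severity — inferred from how its result is used and from the commented-out defaced
# alert names above. The exact texts and mapping are assumptions, not the original code.
def describe_referer(alertType, statusCode, severity):
    describe = ""
    referer = ""
    alertName = ""
    if alertType == 'defaced':
        if severity == 3:
            alertName = u"Lỗi DEFACED - Có sự thay đổi trong cấu trúc website"  # structure changed
        elif severity == 4:
            alertName = u"Lỗi DEFACED - Phát hiện từ nghi vấn trong cấu trúc website"  # suspicious word found
        describe = "The monitored page no longer matches the learned structure."
    else:
        alertName = "HTTP %d" % statusCode
        describe = "The website returned HTTP status code %d." % statusCode
    return describe, referer, alertName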
def check(self, link, hostName, ipAddress, port, userId, webId, protocol, prevStatus):
    sql = ""
    statusCode = 0
    state = 0
    timeAVG = 0
    warning = ""
    severity = ""
    timecheck = (time.time()) * 1000
    flag = 1
    fail = 0
    lastTime = time.time()
    try:
        for loop in range(2):
            try:
                # print str(time.ctime()) + " : Requesting to " + hostName
                r = requests.get(link, allow_redirects=True, headers=setting.headers,
                                 verify=False, timeout=20)
                # print str(time.ctime()) + " : Done " + hostName
            except:
                if fail == 0:
                    fail += 1
                else:
                    print "%s : Fail to check " % datetime.now(), link
                    warning = "Request Timeout"
                    timeAVG = 0.0
                    statusCode = 408
                    state = 0
                    link = None
                    severity = "medium"
                    try:
                        lastTime = setting.alert408[hostName]
                    except KeyError:
                        setting.alert408[hostName] = time.time()
            else:
                try:
                    del setting.alert408[hostName]
                except KeyError:
                    pass
                timeAVG = r.elapsed.total_seconds() * 1000
                if r.status_code == 200:
                    state = 1
                    if timeAVG > 10000:
                        warning = "Long Initial Connection Time"
                        severity = "low"
                    else:
                        for web in webId:
                            alert.check_status(setting.db, "performance", hostName, web[0])
                        warning = "Normal"
                    i = 0
                    # print str(time.ctime()) + " : Checking deface " + hostName
                    for user in userId:
                        deface.check_deface(r.content, hostName, ipAddress, webId[i][0], user[0], link)
                        i += 1
                    # print str(time.ctime()) + " : Done " + hostName
                    i = 0
                    for user in userId:
                        action = {
                            "_index": "user-%d" % user[0],
                            "_type": "web-%d" % webId[i][0],
                            "_id": timecheck,
                            "_source": {
                                'host_name': hostName,
                                'ip': ipAddress,
                                'port': int(port),
                                'state': state,
                                'time_res': timeAVG,
                                'status_code': r.status_code,
                                'warning': warning,
                                'time': timecheck,
                                'user_id': user[0],
                                'web_id': webId[i][0]
                            }
                        }
                        i += 1
                        setting.ESData.append(action)
                elif r.status_code > 399 and r.status_code < 500:
                    warning = "Client Error"
                    timeAVG = 0.0
                    severity = "medium"
                    # print str(time.ctime()) + " : Taking pic " + hostName
                    for eachid in webId:
                        alert.take_shot(hostName, link, r.status_code, eachid[0])
                    state = 0
                    flag = 0
                    # print str(time.ctime()) + " : Done " + hostName
                elif r.status_code >= 500:
                    warning = "Internal Server Error"
                    timeAVG = 0.0
                    severity = "high"
                    # print str(time.ctime()) + " : Taking pic " + hostName
                    for eachid in webId:
                        alert.take_shot(hostName, link, r.status_code, eachid[0])
                    state = 0
                    # print str(time.ctime()) + " : Done " + hostName
                    flag = 0
                statusCode = r.status_code
                break
        i = 0
        if statusCode == 408:
            if time.time() - lastTime > 600:
                # The host has been timing out for more than 10 minutes: flush the
                # buffered events and record the failure.
                flag = 0
                state = 0
                setting.MySQLEvent += setting.tmpMySQLEvent[hostName]
                del setting.tmpMySQLEvent[hostName]
                for user in userId:
                    tmpSQL = (statusCode, hostName, timecheck, timeAVG, ipAddress, user[0], webId[i][0])
                    setting.MySQLEvent.append(tmpSQL)
                    i += 1
            else:
                # Buffer the events until the timeout has persisted long enough.
                try:
                    setting.tmpMySQLEvent[hostName]
                except KeyError:
                    setting.tmpMySQLEvent[hostName] = []
                for user in userId:
                    tmpSQL = (statusCode, hostName, timecheck, timeAVG, ipAddress, user[0], webId[i][0])
                    setting.tmpMySQLEvent[hostName].append(tmpSQL)
                    i += 1
        else:
            for user in userId:
                tmpSQL = (statusCode, hostName, timecheck, timeAVG, ipAddress, user[0], webId[i][0])
                setting.MySQLEvent.append(tmpSQL)
                i += 1
        if flag == 0:
            for web in webId:
                # Note: unlike the url-based check() variant above, this tuple carries
                # no start-time field.
                tmpMongo = ('performance', severity, ipAddress, hostName, timeAVG, statusCode,
                            web[0], link, None, None)
                setting.MongoData.append(tmpMongo)
        if prevStatus != state:
            sql = "UPDATE trackWebsite SET status = %d WHERE hostName = '%s' AND protocol = '%s' AND port = '%s'" % (
                state, hostName, protocol, port)
            setting.MySQLUpdate.append(sql)
    except Exception:
        error.catchError(traceback.format_exc())
    return
def check_deface(newContent, url, name, ipAddress, webId, userId):
    try:
        flag = 0
        rawContent = base64.b64encode(newContent)
        # Normalise the HTML so that every tag starts on its own line, then build a tree.
        newContent = newContent.strip()
        newContent = re.sub('</', '\n</', newContent)
        newContent = re.sub('>\s*<', '>\n<', newContent)
        newContent = re.sub('><', '>\n<', newContent)
        newContent = newContent.split('\n')
        newNode = Node.Node()
        newNode.import_object(newContent)
        newNode.importContent(newContent)
        try:
            result = setting.listWebData[webId]
        except:
            # First time this website is seen: store its structure and start learning.
            newNode = base64.b64encode(cPickle.dumps(newNode))
            update = "INSERT INTO webStruct (url, limitTime, isStructed, struct, webId, userId, object) VALUES ('%s' , '%d', '%d', '%s', '%d', '%d', '%s')" % (
                url, 1, 0, rawContent, webId, userId, newNode)
            setting.listObject[webId] = (newNode, rawContent, webId)
        else:
            limitTime = result[0]
            isStructed = int(result[1])
            struct = setting.listObject[webId][1]
            learningTime = setting.listLearnTime[webId]
            if isStructed == 1:
                # The structure has been learned: compare ('C' = check) against the stored tree.
                # confirmstruct = base64.b64decode(struct)
                # confirmstruct = confirmstruct.strip()
                # confirmstruct = re.sub('</', '\n</', confirmstruct)
                # confirmstruct = re.sub('>\s*<', '>\n<', confirmstruct)
                # confirmstruct = re.sub('><', '>\n<', confirmstruct)
                # confirmstruct = confirmstruct.split('\n')
                # oldNode = Node.Node()
                # oldNode.import_object(confirmstruct, hostName)
                # oldNode.importContent(confirmstruct)
                oldNode = cPickle.loads(base64.b64decode(setting.listObject[webId][0]))
                oldNode, tmp = find_struct(oldNode, newNode, 'C', ipAddress, name, webId, url)
                if tmp == 0:
                    alert.check_status(setting.db, 'defaced', url, webId)
                else:
                    newContent = oldNode.boderDiffHTML(newContent)
            else:
                # Still learning: fold the new content into the stored structure ('F').
                oldNode = cPickle.loads(base64.b64decode(setting.listObject[webId][0]))
                oldNode, tmp = find_struct(oldNode, newNode, 'F', ipAddress, name, webId, url)
                newContent = oldNode.render_html(newContent)
                struct = ""
                for each in newContent:
                    if each != None:
                        struct += each + '\n'
                struct = base64.b64encode(struct)
                if limitTime <= 480 * learningTime:
                    limitTime += 1
                else:
                    # Learning period is over: mark the structure as confirmed.
                    isStructed = 1
                    if tmp == 0:
                        alert.check_status(setting.db, 'defaced', url, webId)
                    else:
                        newContent = oldNode.boderDiffHTML(newContent)
            oldNode = base64.b64encode(cPickle.dumps(oldNode))
            update = "UPDATE webStruct SET isStructed = '%d', struct = '%s', limitTime = '%d', object = '%s' WHERE webId = '%d'" % (
                isStructed, struct, limitTime, oldNode, webId)
            setting.listObject[webId] = (oldNode, struct, webId)
        setting.MySQLUpdate.append(update)
    except RuntimeError:
        print "Hit maximum recursion depth! Cannot save this object"
    except Exception:
        error.catchError(traceback.format_exc())
    return