def __show_anomalies():
    """Print every detected anomaly (user-based and address-based) as an ASCII table.

    Type 1 anomalies are joined to user login data, type 2 to IP address data;
    both result sets are concatenated and printed with one shared row template.
    """
    connection = DatabaseConnection.init_db_connection()
    cursor = connection.cursor()
    # type=1 -> anomalies detected on per-user login data
    cursor.execute("select username, level_id, fail_count, success_count, "
                   "value, dateOccurred_from, dateOccurred_to "
                   "from anomaly join userData on userData.id = anomaly.data_id "
                   "join user on user.id = userData.user_id "
                   "where type=1")
    output = cursor.fetchall()
    # type=2 -> anomalies detected on per-IP-address login data
    cursor.execute("select ip_address, level_id, fail_count, success_count, "
                   "value, dateOccurred_from, dateOccurred_to "
                   "from anomaly join addressData on addressData.id = anomaly.data_id "
                   "join address on address.id = addressData.ip_address_id "
                   "where type=2")
    output += cursor.fetchall()
    # One template shared by the header and every data row (the original
    # repeated the literal four times).
    row_format = "|{:15} | {:6} | {:10} | {:14} | {:12} | {:16} | {:16}|"
    print('_' * 109)
    print(row_format.format("User or Ip", "Level", "Fail rate", "Success rate",
                            "Metric value", "Date from", "Date to"))
    for item in output:
        if item[5]:
            # Time window known -> render compact fixed-width timestamps.
            print(row_format.format(item[0], item[1], item[2], item[3], item[4],
                                    item[5].strftime("%Y-%m-%d %H:%M"),
                                    item[6].strftime("%Y-%m-%d %H:%M")))
        else:
            print(row_format.format(item[0], item[1], item[2], item[3], item[4],
                                    item[5], item[6]))
    print('_' * 109)
def read_file(self):
    """Open the configured log file, analyse every relevant record and store results.

    Opens ``config.logname``, crops the log to the records still to be analysed,
    parses each line, inserts the collected data into the database and clears
    the in-memory lists.  Logs an error and returns early if the file cannot
    be opened.

    Fix over the original: the file handle and the DB connection are now
    released via try/finally even when parsing or insertion raises.
    """
    try:
        self.logger.info("Opening log file")
        log = open(config.logname, "r")
    except IOError:
        self.logger.error("Unable to open file right now. Please check if you "
                          "have permissions to read this file or if specified correct path to log file")
        return
    self.logger.info("Started analysis of log {} at {}".format(log.name, datetime.datetime.now()))
    connection = DatabaseConnection.init_db_connection()
    try:
        try:
            lines = log.readlines()
            lines = self.__crop_logfile(lines)
            self.logger.info("Log records to analyse: {}.".format(len(lines)))
            for item in lines:
                self.__parse_message(item)
        finally:
            # Close the log even if parsing raises; the original leaked the
            # handle on an exception inside __parse_message.
            if not log.closed:
                log.close()
        self.logger.info(
            """Log analysis completed at {}. File was successfully closed.""".format(datetime.datetime.now()))
        self.__insert_data(connection)
        self.__clear_data_lists()
    finally:
        if connection:
            connection.close()
def __stats_insert_data(self):
    """Populate the eight statistics labels with aggregate values from the database.

    Each label displays one scalar aggregate.  ``SUM()`` returns NULL on an
    empty table; the original passed that straight to ``int()`` and crashed
    with TypeError, so NULL is coerced to 0 here.
    """
    connection = DatabaseConnection.init_db_connection()
    cursor = connection.cursor()

    def scalar(query):
        # Execute a single-value aggregate query; treat NULL (no rows) as 0.
        cursor.execute(query)
        row = cursor.fetchone()
        return int(row[0] or 0)

    self.stats_label1.set_text(
        "Total number of users logged during running of application: {}".format(
            scalar("SELECT count(*) from user")))
    self.stats_label2.set_text(
        "Total number of IP addresses used for login to system: {}".format(
            scalar("SELECT count(*) from address")))
    self.stats_label3.set_text(
        "Total number of successful logins to system: {}".format(
            scalar("SELECT SUM(success_count) from userData")))
    self.stats_label4.set_text(
        "Total number of failed logins to system: {}".format(
            scalar("SELECT SUM(fail_count) from userData")))
    self.stats_label5.set_text(
        "Total number of detected anomalies: {}".format(
            scalar("SELECT count(*) from anomaly")))
    self.stats_label6.set_text(
        "Total number of analysed log records: {}".format(
            scalar("SELECT sum(success_count+fail_count) from userData")))
    self.stats_label7.set_text(
        "Number of blocked user accounts: {}".format(
            scalar("SELECT count(*) from blockingAccount where status='blocked'")))
    self.stats_label8.set_text(
        "Number of blocked IP addresses: {}".format(
            scalar("SELECT count(*) from blockingAddress where status='blocked'")))
def __show_address_data():
    """Print per-IP-address login statistics (fail/success counts, time window) as an ASCII table."""
    connection = DatabaseConnection.init_db_connection()
    cursor = connection.cursor()
    cursor.execute("select ip_address, fail_count, success_count, "
                   "dateOccurred_from, dateOccurred_to from addressData "
                   "join address on addressData.ip_address_id = address.id")
    output = cursor.fetchall()
    # One template shared by the header and every data row (the original
    # repeated the literal three times).
    row_format = "|{:15} | {:10} | {:14} | {:16} | {:16}|"
    print('_' * 84)
    print(row_format.format("Ip address", "Fail rate", "Success rate", "Date from", "Date to"))
    for item in output:
        if item[3]:
            # Time window known -> render compact fixed-width timestamps.
            print(row_format.format(item[0], item[1], item[2],
                                    item[3].strftime("%Y-%m-%d %H:%M"),
                                    item[4].strftime("%Y-%m-%d %H:%M")))
        else:
            print(row_format.format(item[0], item[1], item[2], item[3], item[4]))
    print('_' * 84)
def create_anomaly_model(self, level):
    """Return a Gtk.ListStore containing every anomaly of the given level.

    User anomalies (type=1) come first, then address anomalies (type=2).
    Each row holds six stringified columns: identifier (username or IP),
    fail_count, success_count, value, date from, date to.

    :param level: anomaly level id to filter on (matches ``anomaly.level_id``)
    :return: populated ``Gtk.ListStore`` of six ``str`` columns
    """
    store = Gtk.ListStore(str, str, str, str, str, str)
    connection = DatabaseConnection.init_db_connection()
    cursor = connection.cursor()
    # Parameterized queries: the original interpolated `level` with
    # str.format(), an SQL injection risk.  %s placeholders assume a
    # MySQL-style driver (NOW()/time_format used elsewhere in this file).
    cursor.execute("select username, fail_count, success_count, "
                   "value, dateOccurred_from, dateOccurred_to "
                   "from anomaly join userData on userData.id = anomaly.data_id "
                   "join user on user.id = userData.user_id "
                   "where type=1 and level_id = %s", (level,))
    output = cursor.fetchall()
    cursor.execute("select ip_address, fail_count, success_count, "
                   "value, dateOccurred_from, dateOccurred_to "
                   "from anomaly join addressData on addressData.id = anomaly.data_id "
                   "join address on address.id = addressData.ip_address_id "
                   "where type=2 and level_id = %s", (level,))
    output += cursor.fetchall()
    for item in output:
        store.append([str(column) for column in item])
    return store
def __start_unblocking_thread(self, interval=15):
    """Daemon loop that lifts expired user and IP address blocks.

    Polls the database every ``interval`` seconds and unblocks every account
    and address whose ``date_unblocked`` timestamp has passed while its
    status is still 'blocked'.  This method never returns; it is intended
    to run in a background thread.

    :param interval: seconds to sleep between polling iterations
                     (default 15, matching the original hard-coded value)
    """
    self.logger.info("Starting user and IP address unblocking daemon.")
    cursor = DatabaseConnection.init_db_connection().cursor()
    user_blocker = UserBlocker()
    address_blocker = AddressBlocker()
    while True:
        # Accounts whose block has expired but is still marked active.
        cursor.execute("Select blockingAccount.id, username from blockingAccount "
                       "join user on user.id = blockingAccount.user_id "
                       "where date_unblocked < NOW() and status='blocked'")
        for row in cursor.fetchall():
            user_blocker.unblock_user(row[1], row[0])
        # Same for blocked IP addresses.
        cursor.execute("Select blockingAddress.id, ip_address from blockingAddress "
                       "join address on address.id = blockingAddress.ip_address_id "
                       "where date_unblocked < NOW() and status='blocked'")
        for row in cursor.fetchall():
            address_blocker.unblock_address(row[1], row[0])
        time.sleep(interval)
def compute_metrics(self):
    """Analyse all not-yet-processed login data and record detected anomalies.

    Runs two symmetric passes: user records (anomaly type 1) and IP address
    records (type 2).  Every record is flagged as processed, then evaluated
    via ``Anomaly(success_count, fail_count)``; a valid anomaly is either
    updated in place or inserted (with an alert sent), and a level-3 anomaly
    triggers blocking when enabled in config.

    Fixes over the original: a string literal broken across source lines is
    repaired; deprecated ``logger.warn`` replaced by ``warning``; misleading
    dunder-prefixed local names (``__user_blocker``) renamed — locals are
    never name-mangled, so the prefix only suggested attributes that don't
    exist.
    """
    self.__logger.info("Search for anomalies started.")
    user_blocker = None      # constructed lazily, only if a level-3 user anomaly appears
    address_blocker = None   # likewise for IP addresses
    connection = DatabaseConnection.init_db_connection()
    c = connection.cursor()

    # ---- pass 1: per-user login data (anomaly type 1) --------------------
    c.execute("SELECT userData.id, fail_count, success_count, username "
              "from userData "
              "join user on user.id = userData.user_id "
              "where metric_set=0")
    user_data_to_analyse = c.fetchall()
    self.__logger.info("User records to recompute: {}".format(c.rowcount))
    for record in user_data_to_analyse:
        # Flag the row first so it is never re-analysed.  The int() cast
        # makes the string interpolation injection-safe.
        c.execute("UPDATE userData set metric_set=1 where id={}".format(int(record[0])))
        fail_count = record[1]
        success_count = record[2]
        anomaly = Anomaly(success_count, fail_count)
        if anomaly.is_valid:
            self.__logger.warning("Anomaly detected. Checking if existing anomaly should be updated, or new created.")
            c.execute("SELECT id from anomaly where data_id = {} and type=1".format(int(record[0])))
            existing_anomaly = c.fetchone()
            if existing_anomaly:
                self.__logger.info("Updating anomaly.")
                self.__update_anomaly(existing_anomaly[0], anomaly, c)
            else:
                self.__logger.info("Inserting new anomaly.")
                self.__insert_anomaly(record[0], anomaly, c, 1)
                self.__send_alert(anomaly, record[3])
                self.__logger.info("New anomaly data stored. Alert was sent according to level of anomaly")
            if anomaly.level == 3 and config.user_blocking_enabled:
                if not user_blocker:
                    user_blocker = UserBlocker()
                user_blocker.block_user(record[3])

    # ---- pass 2: per-IP-address login data (anomaly type 2) --------------
    c.execute("SELECT addressData.id, fail_count, success_count, ip_address "
              "from addressData "
              "join address on address.id = addressData.ip_address_id "
              "where metric_set=0")
    ip_data_to_analyse = c.fetchall()
    self.__logger.info("Ip records to recompute: {}".format(c.rowcount))
    for record in ip_data_to_analyse:
        c.execute("UPDATE addressData set metric_set=1 where id={}".format(int(record[0])))
        fail_count = record[1]
        success_count = record[2]
        anomaly = Anomaly(success_count, fail_count)
        if anomaly.is_valid:
            self.__logger.info("Anomaly detected. Checking if existing anomaly should be updated, or new created.")
            c.execute("SELECT id from anomaly where data_id = {} and type=2".format(int(record[0])))
            existing_anomaly = c.fetchone()
            if existing_anomaly:
                self.__logger.info("Updating anomaly.")
                self.__update_anomaly(existing_anomaly[0], anomaly, c)
            else:
                self.__logger.info("Inserting new anomaly.")
                self.__insert_anomaly(record[0], anomaly, c, 2)
                self.__send_alert(anomaly, record[3])
                self.__logger.info("New anomaly data stored. Alert was sent according to level of anomaly")
            if anomaly.level == 3 and config.address_blocking_enabled:
                if not address_blocker:
                    address_blocker = AddressBlocker()
                address_blocker.block_address(record[3])
def __init__(self):
    """Initialize the address-blocking utility.

    Sets up the project logger, a write handle on the null device
    (purpose not visible in this block — presumably used to silence
    output of blocking commands; verify against the rest of the class)
    and a database cursor for blocking queries.
    """
    self.__logger = logging.getLogger('SecurityMetricIDS')
    self.__logger.info("Initializing address blocking utility.")
    self.__dev_null = open(os.devnull, 'w')
    connection = DatabaseConnection.init_db_connection()
    self.cursor = connection.cursor()
def __define_statistics_window(self):
    """Build the statistics window: eight text labels, a pie chart of
    anomalies by severity level and a time plot of login activity.

    The labels are created empty — presumably filled later by
    ``__stats_insert_data`` (verify against callers).

    Fixes over the original: the first figure canvas was also named
    ``canvas2`` (shadowed by the second one) and is renamed ``canvas1``;
    the success-login plot style ``"yo-"`` (yellow) contradicted the chart
    legend "Green = Successful logins" and is corrected to ``"go-"``.
    """
    self.stats_window = Gtk.Window()
    self.stats_window.set_size_request(800, 860)
    background_color = Gdk.color_parse('#bfbfbf')
    self.stats_window.modify_bg(Gtk.StateType.NORMAL, background_color)
    self.stats_window.connect("destroy", self.__on_close_stats)
    self.stats_window.set_resizable(False)
    self.stats_window.set_title("Statistics information")
    self.close_btn = Gtk.Button("Close")
    self.close_btn.set_size_request(150, 40)
    self.close_btn.set_tooltip_text("Close this window.")
    self.close_btn.connect("clicked", self.__on_close_stats)
    self.stats_label1 = Gtk.Label()
    self.stats_label2 = Gtk.Label()
    self.stats_label3 = Gtk.Label()
    self.stats_label4 = Gtk.Label()
    self.stats_label5 = Gtk.Label()
    self.stats_label6 = Gtk.Label()
    self.stats_label7 = Gtk.Label()
    self.stats_label8 = Gtk.Label()

    # --- Pie chart: number of anomalies per severity level ----------------
    graph_container1 = Gtk.VBox(False, 8)
    scroll_window1 = Gtk.ScrolledWindow()
    graph_container1.pack_start(scroll_window1, True, True, 0)
    graph_container1.set_size_request(800, 220)
    figure1 = plt.figure(figsize=[0.7, 0.7])
    axis1 = figure1.add_subplot(111)
    connection = DatabaseConnection.init_db_connection()
    cursor = connection.cursor()
    cursor.execute("SELECT count(*) from anomaly where level_id = 3")
    third_lvl_count = cursor.fetchone()
    cursor.execute("SELECT count(*) from anomaly where level_id = 2")
    second_lvl_count = cursor.fetchone()
    cursor.execute("SELECT count(*) from anomaly where level_id = 1")
    first_lvl_count = cursor.fetchone()
    labels = ('Critical level anomalies: {}'.format(int(third_lvl_count[0])),
              'Medium level anomalies: {}'.format(int(second_lvl_count[0])),
              'Low level anomalies: {}'.format(int(first_lvl_count[0])))
    sizes = [int(third_lvl_count[0]), int(second_lvl_count[0]), int(first_lvl_count[0])]
    colors = ['red', 'orange', 'yellow']
    explode = (0.03, 0.03, 0.03)  # pull all three wedges slightly apart
    axis1.pie(sizes, explode=explode, labels=labels, colors=colors, shadow=True, startangle=10)
    axis1.set_title("Graphical view of detected anomalies")
    axis1.axis('equal')  # keep the pie circular
    axis1.plot()
    canvas1 = FigureCanvas(figure1)
    scroll_window1.add_with_viewport(canvas1)

    # --- Line plot: successful vs failed logins over time -----------------
    graph_container2 = Gtk.VBox(False, 8)
    scroll_window2 = Gtk.ScrolledWindow()
    graph_container2.pack_start(scroll_window2, True, True, 0)
    graph_container2.set_size_request(800, 400)
    figure2 = plt.figure(figsize=[0.6, 0.6])
    axis2 = figure2.add_subplot(211)
    axis2.set_title("Graphical view of logging process in time.\n Red = Failed logins. Green = Successful logins.")
    cursor.execute(" select concat(concat(dateOccurred_from, ' - '), time_format(dateOccurred_to,'%H:%i'))"
                   " as Time, sum(success_count), sum(fail_count) from userData where dateOccurred_from is not NULL "
                   " group by dateOccurred_from order by dateOccurred_from ")
    output = cursor.fetchall()
    dates = [(r[0]) for r in output]
    success_values = [int(r[1]) for r in output]
    fail_values = [int(r[2]) for r in output]
    x = range(len(dates))
    # use number instead of dates in case of too many x values
    if len(x) < 30:
        axis2.set_xticks(x)
        axis2.set_xticklabels(dates, rotation=50)
    axis2.set_ylabel("Number of login procedures", rotation='vertical')
    axis2.set_xlabel("Date and time", rotation='horizontal')
    axis2.plot(x, success_values, "go-")  # green, matching the title legend
    axis2.plot(x, fail_values, "r.-")
    canvas2 = FigureCanvas(figure2)
    scroll_window2.add_with_viewport(canvas2)

    # --- Fixed-position layout --------------------------------------------
    location = Gtk.Fixed()
    location.put(self.close_btn, 630, 810)
    location.put(self.stats_label1, 10, 20)
    location.put(self.stats_label2, 10, 40)
    location.put(self.stats_label3, 10, 60)
    location.put(self.stats_label4, 10, 80)
    location.put(self.stats_label5, 10, 100)
    location.put(self.stats_label6, 10, 120)
    location.put(self.stats_label7, 10, 140)
    location.put(self.stats_label8, 10, 160)
    location.put(graph_container1, 10, 190)
    location.put(graph_container2, 30, 410)
    self.stats_window.add(location)