Example #1
    def __init__(self, debug=False, ip_list=None, status_code=None):
        """
        Initialize UserFilter.

        Args:
            debug (bool): Log on terminal or not
            ip_list (list): List of IPs to filter / grab from the log file
            status_code (list): List of status codes to filter / grab from the log file

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if ip_list:
            self.ip = ip_list
        else:
            self.ip = []  # Initialize as empty list

        if status_code:
            self.status_code = [int(status) for status in status_code]
        else:
            self.status_code = []  # Initialize as empty list

        # List of logged IPs
        self.logged_IP = list()  # Don't log these IPs again
Example #2
    def __init__(self, debug=False, path=None, window=30):
        """
        Initialize NginxParser class.

        Args:
            debug (bool): Log on terminal or not
            path (str): Path of the log file
            window (int): Process log entries up to this many days old

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if path is not None:
            self.path = path
        else:
            self.logger.log("No log path specified, exiting.", logtype="error")
            sys.exit(0)

        # Convert window (in days) to seconds
        self.window = int(window) * 24 * 3600  # days * hours * seconds

        # Regex for parsing nginx log file
        self.NGINX_RGX = r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*\[([0-9]' \
                          r'+/[a-zA-Z]+/[0-9]+:[0-9]+:[0-9]+:[0-9]+).*"GET\s(.*)"\s(\d+).*"\s"([^"]+)'

        # Initialize dict for containing parsed data
        self.nginx_dict = dict()
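
A minimal, self-contained sketch of what the NGINX_RGX pattern extracts; the access-log line below is illustrative, not taken from a real log file.

import re

# Same pattern as self.NGINX_RGX above
NGINX_RGX = (r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*\[([0-9]'
             r'+/[a-zA-Z]+/[0-9]+:[0-9]+:[0-9]+:[0-9]+).*"GET\s(.*)"\s(\d+).*"\s"([^"]+)')

sample_line = ('127.0.0.1 - - [22/Jul/2019:10:30:45 +0000] '
               '"GET /index.html HTTP/1.1" 200 612 "-" "Mozilla/5.0"')

print(re.findall(NGINX_RGX, sample_line))
# [('127.0.0.1', '22/Jul/2019:10:30:45', '/index.html HTTP/1.1', '200', 'Mozilla/5.0')]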
Example #3
    def __init__(self, debug=False):
        """
        Initialize SpiderDetect.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(
            __name__,
            debug=debug
        )

        # Path of file containing spider user agents payloads
        self._PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/bad_ua.txt"

        # Load spider user agents payloads
        self.payloads = utils.open_file(self._PAYLOAD_FILE)

        # Initialize threshold to 50 requests / second
        self._THRESHOLD = 50  # inter = 0.02

        # List of IPs
        self.logged_IP = list()
Example #4
    def __init__(self, debug=False):
        """
        Initialize DDoS.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        # Initialize threshold to 1000 requests per second
        self._SISP_THRESHOLD = 1000  # inter = 0.001
        self._SIMP_THRESHOLD = 100  # 100 different IPs that trigger SISP DoS

        # List of IPs
        self.SISP_LIST = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)
Example #5
    def __init__(self, debug=False):
        """
        Initialize SQLi.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(
            __name__,
            debug=debug
        )

        # Path of file containing sqli payloads
        self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/sqli.txt"
        # Path of file containing sqli regex rules
        self.REGEX_FILE = "securetea/lib/log_monitor/server_log/rules/regex/sqli.txt"

        # Load sqli payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)
        # Load sqli regex rules
        self.regex = utils.open_file(self.REGEX_FILE)

        # Logged IP list
        self.logged_IP = list()
Example #6
    def __init__(self, debug=False):
        """
        Initialize PortScan.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        # Path of file containing port_scan payloads
        self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/port_scan_ua.txt"

        # Load port_scan payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)

        # List of IPs
        self.logged_IP = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)
Example #7
    def __init__(self, debug=False):
        """
        Initialize CrossSite.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(
            __name__,
            debug=debug
        )

        # Path of file containing XSS payloads
        self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/xss.txt"
        # Path of file containing XSS regex rules
        self.REGEX_FILE = "securetea/lib/log_monitor/server_log/rules/regex/xss.txt"

        # Load XSS payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)
        # Load XSS regex rules
        self.regex = utils.open_file(self.REGEX_FILE)

        # Logged IP list
        self.logged_IP = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)
Example #8
    def __init__(self, debug=False, test=False):
        """
        Initialize WebShell.

        Args:
            debug (bool): Log on terminal or not
            test (bool): Load payloads from the repository path instead of /etc/securetea

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if test:
            # Path of file containing web_shell payloads
            self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/web_shell.txt"
        else:
            # Path of file containing web_shell payloads
            self.PAYLOAD_FILE = "/etc/securetea/log_monitor/server_log/payloads/web_shell.txt"

        # Load web_shell payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)

        # Logged IP list
        self.logged_IP = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)
Example #9
    def __init__(self, test=False, debug=False):
        """
        Initialize Ssrf.

        Args:
            debug (bool): Log on terminal or not
            test (bool): Load payloads from the repository path instead of /etc/securetea

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(
            __name__,
            debug=debug
        )

        if test:
            # Path of file containing SSRF payloads
            self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/ssrf.txt"
            # Path of file containing SSRF regex rules
            self.REGEX_FILE = "securetea/lib/log_monitor/server_log/rules/regex/ssrf.txt"
            # Path of the IP Rules
            self.IP_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/ips.txt"

        else:
            # Path of file containing SSRF payloads
            self.PAYLOAD_FILE = "/etc/securetea/log_monitor/server_log/payloads/ssrf.txt"
            # Path of file containing SSRF regex rules
            self.REGEX_FILE = "/etc/securetea/log_monitor/server_log/regex/ssrf.txt"
            # Path of the IP Rules
            self.IP_FILE = "/etc/securetea/log_monitor/server_log/payloads/ips.txt"

        # Load SSRF payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)
        # Load SSRF regex rules
        self.regex = utils.open_file(self.REGEX_FILE)
        # IPs
        self.ips = utils.open_file(self.IP_FILE)

        # Logged IP list
        self.logged_IP = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)
Example #10
    def __init__(self, debug=False):
        """
        Initialize FuzzerDetect.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        # Set threshold to 25 failure attempts / second
        self._THRESHOLD = 25  # inter = 0.04

        # List of IPs
        self.logged_IP = list()
Example #11
class PortScan(object):
    """PortScan Class."""
    def __init__(self, debug=False, test=False):
        """
        Initialize PortScan.

        Args:
            debug (bool): Log on terminal or not
            test (bool): Load payloads from the repository path instead of /etc/securetea

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if test:
            # Path of file containing port_scan payloads
            self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/port_scan_ua.txt"
        else:
            # Path of file containing port_scan payloads
            self.PAYLOAD_FILE = "/etc/securetea/log_monitor/server_log/payloads/port_scan_ua.txt"

        # Load port_scan payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)

        # List of IPs
        self.logged_IP = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)

    def detect_port_scan(self, data):
        """
        Detect possible Port Scan recon attacks.
        Look for a possible port scan user agent payload
        in the user agent field.

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            user_agent = data[ip]["ua"]
            if (self.payload_match(user_agent)):
                if ip not in self.logged_IP:
                    self.logged_IP.append(ip)
                    last_time = data[ip]["ep_time"][0]
                    msg = "Possible port scan detected from: " + str(ip) + \
                          " on: " + utils.epoch_to_date(last_time)
                    self.logger.log(msg, logtype="warning")
                    utils.write_ip(str(ip))
                    # Generate CSV report using OSINT tools
                    self.osint_obj.perform_osint_scan(ip.strip(" "))
                    # Write malicious IP to file, to teach Firewall about the IP
                    write_mal_ip(ip.strip(" "))

    def payload_match(self, user_agent):
        """
        Match parsed user agent for a
        possible port scan user agent payload.

        Args:
            user_agent (str): User agent on which to perform
                              payload string matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for agent in user_agent:
            for payload in self.payloads:
                payload = payload.strip(" ").strip("\n")
                if payload in agent:
                    return True
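
A standalone sketch of the payload_match logic above; the payload strings and user agents are hypothetical stand-ins for the contents of port_scan_ua.txt and the parsed log data.

def payload_match(user_agent_list, payload_list):
    """Return True if any payload substring occurs in any user agent."""
    for agent in user_agent_list:
        for payload in payload_list:
            payload = payload.strip(" ").strip("\n")
            if payload in agent:
                return True
    return False

payloads = ["Nmap Scripting Engine\n", "masscan\n"]       # hypothetical rule lines
user_agents = ["Mozilla/5.0 (compatible; Nmap Scripting Engine)",
               "curl/7.68.0"]

print(payload_match(user_agents, payloads))  # True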
Example #12
class DDoS(object):
    """DDoS Class."""
    def __init__(self, debug=False):
        """
        Initialize DDoS.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        # Initialize threshold to 1000 requests per second
        self._SISP_THRESHOLD = 1000  # inter = 0.001
        self._SIMP_THRESHOLD = 100  # 100 different IPs that trigger SISP DoS

        # List of IPs
        self.SISP_LIST = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)

    def detect_ddos(self, data):
        """
        Detect DoS attacks and classify them into two categories:
        - Single IP Single Port DoS attack
        - Single IP Multiple Port DoS attack
        Look for IP addresses with a high number of GET requests and
        a small time difference to predict a SISP DoS attack.
        A high number of SISP alarms indicates a possible MISP (Multiple IP) DoS attack.

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            count = data[ip]["count"]
            last_time = data[ip]["ep_time"][0]
            initial_time = data[ip]["ep_time"][int(
                len(data[ip]["ep_time"]) - 1)]
            delta = abs(int(last_time - initial_time))

            try:
                calc_count_thresh = int(count / delta)
            except ZeroDivisionError:
                calc_count_thresh = int(count)

            if calc_count_thresh > self._SISP_THRESHOLD:  # if crosses threshold, trigger alarm
                msg = "Possible Single IP DoS Attack Detected from: " + \
                       str(ip) + " on: " + utils.epoch_to_date(last_time)
                self.logger.log(msg, logtype="warning")
                if ip not in self.SISP_LIST:
                    self.SISP_LIST.append(ip)
                    # Generate CSV report using OSINT tools
                    self.osint_obj.perform_osint_scan(ip.strip(" "))
                    # Write malicious IP to file, to teach Firewall about the IP
                    write_mal_ip(ip.strip(" "))

            if len(self.SISP_LIST) > self._SIMP_THRESHOLD:  # if no. of SISP is huge
                for ip in self.SISP_LIST:
                    self.logger.log(
                        "Possible Multiple IP DoS Attack Detected from: " +
                        str(ip),
                        logtype="warning")
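
A worked example of the SISP rate check above, with illustrative numbers in place of parsed log data.

count = 4500                            # GET requests seen from one IP
ep_time = [1563790145, 1563790143]      # epoch times, newest first
last_time, initial_time = ep_time[0], ep_time[-1]
delta = abs(int(last_time - initial_time))   # 2 seconds

try:
    calc_count_thresh = int(count / delta)   # 2250 requests / second
except ZeroDivisionError:
    calc_count_thresh = int(count)

print(calc_count_thresh > 1000)  # True -> possible Single IP DoS attack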
Example #13
class SQLi(object):
    """SQLi Class."""

    def __init__(self, debug=False):
        """
        Initialize SQLi.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(
            __name__,
            debug=debug
        )

        # Path of file containing sqli payloads
        self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/sqli.txt"
        # Path of file containing sqli regex rules
        self.REGEX_FILE = "securetea/lib/log_monitor/server_log/rules/regex/sqli.txt"

        # Load sqli payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)
        # Load sqli regex rules
        self.regex = utils.open_file(self.REGEX_FILE)

        # Logged IP list
        self.logged_IP = list()

    def detect_sqli(self, data):
        """
        Detect possible SQL Injection (sqli) attacks.
        Use regex rules and string matching to detect
        SQLi attacks.
        4 Level rules:
            - Simple regex
            - Hex regex
            - Payload string matching
            - URI encoded string matching

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            get_req = data[ip]["get"]
            last_time = data[ip]["ep_time"][0]
            if (self.payload_match(get_req) or self.regex_check(get_req)):
                if ip not in self.logged_IP:  # if not logged earlier
                    self.logged_IP.append(ip)
                    msg = "Possible SQL injection (sqli) detected from: " + str(ip) + \
                          " on: " + str(utils.epoch_to_date(last_time))
                    self.logger.log(
                        msg,
                        logtype="warning"
                    )
                    utils.write_ip(str(ip))

    def payload_match(self, get_req):
        """
        Match parsed GET request for a
        possible sqli payload.

        Args:
            get_req (str): GET request on which to perform
                           payload string matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for req in get_req:
            for payload in self.payloads:
                payload = payload.strip(" ").strip("\n")
                if (payload in req or
                    utils.uri_encode(payload) in req):
                    return True

    def regex_check(self, get_req):
        """
        Match parsed GET requests against
        sqli regex rules.

        Args:
            get_req (str): GET request on which to perform
                           regex matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for req in get_req:
            for reg in self.regex:
                reg = reg.strip(" ").strip("\n")
                if re.findall(reg, req) != []:
                    return True
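
A standalone sketch of the regex_check logic above; the rule is a simplified, hypothetical SQLi pattern, not a line from rules/regex/sqli.txt.

import re

def regex_check(get_req, regex_rules):
    """Return True if any regex rule matches any GET request."""
    for req in get_req:
        for reg in regex_rules:
            reg = reg.strip(" ").strip("\n")
            if re.findall(reg, req) != []:
                return True
    return False

regex_rules = [r"union(\s|\+)+select" + "\n"]   # hypothetical rule line
get_req = ["/products.php?id=1 union select password from users"]

print(regex_check(get_req, regex_rules))  # True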
Example #14
class NginxParser(object):
    """NginxParser Class."""
    def __init__(self, debug=False, path=None, window=30):
        """
        Initialize NginxParser class.

        Args:
            debug (bool): Log on terminal or not
            path (str): Path of the log file
            window (int): Process log entries up to this many days old

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if path is not None:
            self.path = path
        else:
            self.logger.log("No log path specified, exiting.", logtype="error")
            sys.exit(0)

        # Convert window (in days) to seconds
        self.window = int(window) * 24 * 3600  # days * hours * seconds

        # Regex for parsing nginx log file
        self.NGINX_RGX = r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*\[([0-9]' \
                          r'+/[a-zA-Z]+/[0-9]+:[0-9]+:[0-9]+:[0-9]+).*"GET\s(.*)"\s(\d+).*"\s"([^"]+)'

        # Initialize dict for containing parsed data
        self.nginx_dict = dict()

    def parse(self):
        """
        Parse the log file and save the
        parsed data into a dict.

        Args:
            None

        Raises:
            None

        Returns:
            nginx_dict (dict): Dict containing the parsed
                                data, IP being the key
        """
        # Clear & rotate log file parsed data
        self.nginx_dict.clear()
        self.nginx_log_data = utils.open_file(self.path)
        for line in self.nginx_log_data:
            parsed_data = re.findall(self.NGINX_RGX, line)
            if parsed_data:
                ip = parsed_data[0][0]
                date = parsed_data[0][1].strip(" ")
                day = date.split("/")[0]
                month = date.split("/")[1]
                year = str(date.split("/")[2].split(":")[0])
                last_time = ":".join(str(date.split("/")[2]).split(":")[1:])
                ep_time = utils.get_epoch_time(month, day, year, last_time)
                get = parsed_data[0][2]
                status_code = parsed_data[0][3].strip(" ")
                user_agent = parsed_data[0][4]
                if self.check_within_window(ep_time):
                    self.update_dict(ip, ep_time, get, status_code, user_agent)

        return self.nginx_dict

    def update_dict(self, ip, ep_time, get, status_code, user_agent):
        """
        Update nginx_dict with the values passed.

        Args:
            ip (str): IP address of the source
            ep_time (str): Time of action in epoch time
            get (str): GET request
            status_code (int): Status code of the request
            user_agent (str): User agent of the source

        Raises:
            None

        Returns:
            None
        """
        if self.nginx_dict.get(ip) is None:
            # if new IP address
            self.nginx_dict[ip] = {
                "ep_time": [ep_time],
                "get": [get],
                "status_code": [int(status_code)],
                "ua": [user_agent],
                "count": 1,
                "unique_get": [get]
            }
        else:
            # if IP address already in dict
            prev_count = self.nginx_dict[ip]["count"]
            new_count = prev_count + 1
            self.nginx_dict[ip]["count"] = new_count
            self.nginx_dict[ip]["ep_time"].append(ep_time)
            self.nginx_dict[ip]["get"].append(get)
            if get not in self.nginx_dict[ip]["unique_get"]:
                self.nginx_dict[ip]["unique_get"].append(get)
            self.nginx_dict[ip]["status_code"].append(int(status_code))
            self.nginx_dict[ip]["ua"].append(user_agent)

    def check_within_window(self, ep_time):
        """
        Check whether the time is within the
        specified window.

        Args:
            ep_time (int): Epoch time to check

        Raises:
            None

        Returns:
            TYPE: bool
        """
        current_time = int(time.time())
        if int(current_time - ep_time) < self.window:
            return True
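
A small illustration of the check_within_window arithmetic above: a 30-day window in seconds compared against an entry's epoch timestamp (the timestamp is illustrative).

import time

window = 30 * 24 * 3600                       # days * hours * seconds
ep_time = int(time.time()) - 5 * 24 * 3600    # entry logged 5 days ago

print(int(time.time()) - ep_time < window)    # True -> entry is processed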
Example #15
class FuzzerDetect(object):
    """FuzzerDetect Class."""
    def __init__(self, debug=False):
        """
        Initialize FuzzerDetect.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        # Set threshold to 25 failure attempts / second
        self._THRESHOLD = 25  # inter = 0.04

        # List of IPs
        self.logged_IP = list()

    @staticmethod
    def count_failure(status_code):
        """
        Count the number of failure status codes.

        Args:
            status_code (list): List of status codes

        Raises:
            None

        Returns:
            failure_count (int): Count of failure code
        """
        failure_count = 0
        for code in status_code:
            if (400 <= code < 500):  # if failure code
                failure_count = failure_count + 1
        return failure_count

    def detect_fuzzer(self, data):
        """
        Detect possible URL fuzzing attacks.
        A high number of failure status codes (400-499) from an IP
        within a small period of time indicates a possible
        fuzzing attack.

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            status_code = data[ip]["status_code"]
            # Count failure attempts for that IP
            failure_count = self.count_failure(status_code)
            last_time = data[ip]["ep_time"][0]
            initial_time = data[ip]["ep_time"][int(
                len(data[ip]["ep_time"]) - 1)]
            delta = abs(int(last_time - initial_time))

            try:
                calc_count_thresh = failure_count / delta
                calc_get_thresh = len(data[ip]["get"]) / delta
            except ZeroDivisionError:
                calc_count_thresh = failure_count
                calc_get_thresh = len(data[ip]["get"])

            if (calc_count_thresh > self._THRESHOLD
                    or calc_get_thresh > self._THRESHOLD):
                if ip not in self.logged_IP:
                    self.logged_IP.append(ip)
                    msg = "Possible URL fuzzing detected from: " + str(ip) + \
                          " on: " + utils.epoch_to_date(data[ip]["ep_time"][0])
                    self.logger.log(msg, logtype="warning")
                utils.write_ip(str(ip))
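
A worked example of the failure-rate check above, with illustrative status codes and timestamps.

status_code = [404, 404, 403, 200, 404]   # codes returned to one IP
ep_time = [1563790100, 1563790100]        # newest first; delta is 0 here

failure_count = sum(1 for code in status_code if 400 <= code < 500)  # 4

delta = abs(int(ep_time[0] - ep_time[-1]))
try:
    calc_count_thresh = failure_count / delta
except ZeroDivisionError:
    calc_count_thresh = failure_count     # fall back to the raw count

print(calc_count_thresh > 25)  # False -> below the fuzzing threshold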
Example #16
class CrossSite(object):
    """CrossSite Class."""
    def __init__(self, debug=False, test=False):
        """
        Initialize CrossSite.

        Args:
            debug (bool): Log on terminal or not
            test (bool): Load payloads from the repository path instead of /etc/securetea

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if test:
            # Path of file containing XSS payloads
            self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/xss.txt"
            # Path of file containing XSS regex rules
            self.REGEX_FILE = "securetea/lib/log_monitor/server_log/rules/regex/xss.txt"
        else:
            # Path of file containing XSS payloads
            self.PAYLOAD_FILE = "/etc/securetea/log_monitor/server_log/payloads/xss.txt"
            # Path of file containing XSS regex rules
            self.REGEX_FILE = "/etc/securetea/log_monitor/server_log/regex/xss.txt"

        # Load XSS payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)
        # Load XSS regex rules
        self.regex = utils.open_file(self.REGEX_FILE)

        # Logged IP list
        self.logged_IP = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)

    def detect_xss(self, data):
        """
        Detect possible Cross Site Scripting (XSS) attacks.
        Use regex rules and string matching to detect
        XSS attacks.
        4 Level rules:
            - Simple regex
            - Hex regex
            - Payload string matching
            - URI encoded string matching

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            get_req = data[ip]["get"]
            last_time = data[ip]["ep_time"][0]
            if (self.payload_match(get_req) or self.regex_check(get_req)):
                if ip not in self.logged_IP:  # if not logged earlier
                    self.logged_IP.append(ip)
                    msg = "Possible Cross Site Scripting (XSS) detected from: " + str(ip) + \
                          " on: " + str(utils.epoch_to_date(last_time))
                    self.logger.log(msg, logtype="warning")
                    utils.write_ip(str(ip))
                    # Generate CSV report using OSINT tools
                    self.osint_obj.perform_osint_scan(ip.strip(" "))
                    # Write malicious IP to file, to teach Firewall about the IP
                    write_mal_ip(ip.strip(" "))

    def payload_match(self, get_req):
        """
        Match parsed GET request for a
        possible XSS payload.

        Args:
            get_req (str): GET request on which to perform
                           payload string matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for req in get_req:
            for payload in self.payloads:
                payload = payload.strip(" ").strip("\n")
                if (payload in req or utils.uri_encode(payload) in req):
                    return True

    def regex_check(self, get_req):
        """
        Match parsed GET requests against
        XSS regex rules.

        Args:
            get_req (str): GET request on which to perform
                           regex matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for req in get_req:
            for reg in self.regex:
                reg = reg.strip(" ").strip("\n")
                if re.findall(reg, req) != []:
                    return True
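
A sketch of the two-way payload match used above; utils.uri_encode is assumed here to behave like percent-encoding (urllib.parse.quote), and the payload is a hypothetical XSS string, not a line from rules/payloads/xss.txt.

from urllib.parse import quote

def payload_match(get_req, payloads):
    """Return True if a payload, raw or percent-encoded, occurs in a GET request."""
    for req in get_req:
        for payload in payloads:
            payload = payload.strip(" ").strip("\n")
            if payload in req or quote(payload) in req:
                return True
    return False

payloads = ["<script>alert(1)</script>\n"]                      # hypothetical rule line
get_req = ["/search?q=%3Cscript%3Ealert%281%29%3C/script%3E"]

print(payload_match(get_req, payloads))  # True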
Example #17
class LFI(object):
    """LFI Class."""
    def __init__(self, debug=False):
        """
        Initialize LFI.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        # Path of file containing lfi payloads
        self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/lfi.txt"

        # Load lfi payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)

        # Logged IP list
        self.logged_IP = list()

    def detect_lfi(self, data):
        """
        Detect possible Local File Inclusion (lfi) attacks.
        Use string comparison to scan GET request with the
        list of possible LFI payloads.

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            get_req = data[ip]["get"]
            if (self.payload_match(get_req)):
                if ip not in self.logged_IP:  # if IP not logged earlier
                    self.logged_IP.append(ip)
                    msg = "Possible LFI injection detected from: " + str(ip) + \
                          " on: " + utils.epoch_to_date(data[ip]["ep_time"][0])
                    self.logger.log(msg, logtype="warning")
                    utils.write_ip(str(ip))

    def payload_match(self, get_req):
        """
        Match parsed GET request for a
        possible lfi payload.

        Args:
            get_req (str): GET request on which to perform
                           payload string matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for req in get_req:
            for payload in self.payloads:
                payload = payload.strip(" ").strip("\n")
                if (payload in req or utils.uri_encode(payload) in req):
                    return True
Example #18
class WebShell(object):
    """WebShell Class."""
    def __init__(self, debug=False, test=False):
        """
        Initialize WebShell.

        Args:
            debug (bool): Log on terminal or not
            test (bool): Load payloads from the repository path instead of /etc/securetea

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if test:
            # Path of file containing web_shell payloads
            self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/web_shell.txt"
        else:
            # Path of file containing web_shell payloads
            self.PAYLOAD_FILE = "/etc/securetea/log_monitor/server_log/payloads/web_shell.txt"

        # Load web_shell payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)

        # Logged IP list
        self.logged_IP = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)

    def detect_web_shell(self, data):
        """
        Detect possible Web Shell attacks.
        Use string comparison to scan GET request with the
        list of possible web shell payloads.

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            get_req = data[ip]["get"]
            if (self.payload_match(get_req)):
                if ip not in self.logged_IP:  # if not logged earlier
                    self.logged_IP.append(ip)
                    last_time = data[ip]["ep_time"][0]
                    msg = "Possible web shell detected from: " + str(ip) + \
                          " on: " + str(utils.epoch_to_date(last_time))
                    self.logger.log(msg, logtype="warning")
                    utils.write_ip(str(ip))
                    # Generate CSV report using OSINT tools
                    self.osint_obj.perform_osint_scan(ip.strip(" "))
                    # Write malicious IP to file, to teach Firewall about the IP
                    write_mal_ip(ip.strip(" "))

    def payload_match(self, get_req):
        """
        Match parsed GET request for a
        possible web shell payload.

        Args:
            get_req (str): GET request on which to perform
                           payload string matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for req in get_req:
            for payload in self.payloads:
                payload = payload.strip(" ").strip("\n")
                if (payload in req or utils.uri_encode(payload) in req):
                    return True
Example #19
class SpiderDetect(object):
    """SpiderDetect Class."""

    def __init__(self, debug=False):
        """
        Initialize SpiderDetect.

        Args:
            debug (bool): Log on terminal or not

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(
            __name__,
            debug=debug
        )

        # Path of file containing spider user agents payloads
        self._PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/bad_ua.txt"

        # Load spider user agents payloads
        self.payloads = utils.open_file(self._PAYLOAD_FILE)

        # Initialize threshold to 50 requests / second
        self._THRESHOLD = 50  # inter = 0.02

        # List of IPs
        self.logged_IP = list()

    def detect_spider(self, data):
        """
        Detect possible web crawlers / spiders / bad user agents.
        A high number of unique GET requests from an IP within a
        small period of time is likely to indicate a web crawler /
        spider.

        Also look for bad user agent payloads to flag a bad user agent.

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            count = data[ip]["count"]
            last_time = data[ip]["ep_time"][0]
            initial_time = data[ip]["ep_time"][int(len(data[ip]["ep_time"]) - 1)]
            delta = abs(int(last_time - initial_time))

            try:
                calc_count_thresh = count / delta
                calc_get_thresh = len(data[ip]["unique_get"]) / delta
            except ZeroDivisionError:
                calc_count_thresh = count
                calc_get_thresh = len(data[ip]["unique_get"])

            if (calc_count_thresh > self._THRESHOLD or
                calc_get_thresh > self._THRESHOLD or
                self.payload_match(data[ip]["ua"])):
                if ip not in self.logged_IP:
                    self.logged_IP.append(ip)
                    self.logger.log(
                        "Possible web crawler / spider / bad user agent detected from: " + str(ip),
                        logtype="warning"
                    )
                    utils.write_ip(str(ip))

    def payload_match(self, user_agent):
        """
        Match parsed user agent for a
        possible bad user agent payload.

        Args:
            user_agent (str): User agent on which to perform
                              payload string matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for agent in user_agent:
            for payload in self.payloads:
                payload = payload.strip(" ").strip("\n")
                if payload in agent:
                    return True
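
A worked example of the crawler-rate check above, with illustrative counts and timestamps.

count = 900                          # total requests from one IP
unique_get = 620                     # distinct GET paths requested
ep_time = [1563790160, 1563790150]   # newest first
delta = abs(int(ep_time[0] - ep_time[-1]))   # 10 seconds

try:
    calc_count_thresh = count / delta        # 90 requests / second
    calc_get_thresh = unique_get / delta     # 62 unique paths / second
except ZeroDivisionError:
    calc_count_thresh = count
    calc_get_thresh = unique_get

print(calc_count_thresh > 50 or calc_get_thresh > 50)  # True -> likely crawler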
Example #20
class UserFilter(object):
    """UserFilter class."""
    def __init__(self, debug=False, ip_list=None, status_code=None):
        """
        Initialize UserFilter.

        Args:
            debug (bool): Log on terminal or not
            ip_list (list): List of IPs to filter / grab from the log file
            status_code (list): List of status codes to filter / grab from the log file

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if ip_list:
            self.ip = ip_list
        else:
            self.ip = []  # Initialize as empty list

        if status_code:
            self.status_code = [int(status) for status in status_code]
        else:
            self.status_code = []  # Initialize as empty list

        # List of logged IPs
        self.logged_IP = list()  # Don't log these IPs again

    def filter_user_criteria(self, data):
        """
        Filter / grab data as per user rules
        from the log file on the basis of IP & status code.

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            if (ip in self.ip):  # Look for IP match
                # User rule matched
                if ip not in self.logged_IP:  # Logged earlier or not
                    self.logged_IP.append(ip)
                    self.generate_log_report(ip, data)

        for ip in data.keys():  # Look for status code match
            status_code = data[ip]["status_code"]
            for index, code in enumerate(status_code):
                if code in self.status_code:
                    # User rule matched
                    if ip not in self.logged_IP:  # Logged earlier or not
                        self.logged_IP.append(ip)
                        msg = "IP: " + str(ip) + " GET: " + str(data[ip]["get"][index]) + \
                              " " + "Status code: " + str(code) + \
                              " on: " + utils.epoch_to_date(data[ip]["ep_time"][index])
                        self.logger.log(msg, logtype="info")

    def generate_log_report(self, ip, data):
        """
        Log the filtered data in the following format.
        IP: <ip> GET: <get_request> Status Code: <status_code> on: <date>

        Args:
            ip (str): IP address filtered
            data (dict): Log file parsed data

        Raises:
            None

        Returns:
            None
        """
        for index, req in enumerate(data[ip]["get"]):
            msg = "IP: " + str(ip) + " GET: " + str(req) + \
                  " " + "Status Code: " + str(data[ip]["status_code"][index]) + \
                  " on: " + utils.epoch_to_date(data[ip]["ep_time"][index])
            # Log the message
            self.logger.log(msg, logtype="info")
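
filter_user_criteria consumes the same parsed-data dict that the parser classes build (see NginxParser.update_dict above); a minimal illustrative instance with made-up values:

data = {
    "203.0.113.7": {
        "ep_time": [1563790145],
        "get": ["/admin HTTP/1.1"],
        "status_code": [403],
        "ua": ["curl/7.68.0"],
        "count": 1,
        "unique_get": ["/admin HTTP/1.1"]
    }
}

# With ip_list=["203.0.113.7"] or status_code=[403], the entry above would be
# reported by filter_user_criteria / generate_log_report.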
Example #21
class Engine(object):
    """ServerLog Monitor Engine."""
    def __init__(self,
                 debug=False,
                 log_type=None,
                 log_file=None,
                 window=30,
                 ip_list=None,
                 status_code=None):
        """
        Initialize ServerLog Monitor Engine.

        Args:
            debug (bool): Log on terminal or not
            log_type (str): Type of log file (Apache, Nginx)
            log_file (str): Path of the log file
            window (int): Process log entries up to this many days old (default: 30)
            ip_list (list): List of IPs to filter / grab from the log file
            status_code (list): List of status codes to filter / grab from the log file

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if log_type is None:
            self.logger.log("No server type selected, exiting.",
                            logtype="error")
            sys.exit(0)

        # Initialize log file path and parser object as None
        self.log_file_path = None
        self.parser_obj = None

        # OS to log file path mapping
        self.system_log_file_map = {
            "apache": {
                "debian": "/var/log/apache2/access.log",
                "fedora": "/var/log/httpd/access_log",
                "freebsd": "/var/log/httpd-access.log"
            },
            "nginx": {
                "debian": "/var/log/nginx/access.log"
            }
        }

        if log_file:
            self.log_file_path = str(log_file)
        else:
            os_name = utils.categorize_os()
            if os_name:
                try:
                    self.log_file_path = self.system_log_file_map[log_type][
                        os_name]
                except KeyError:
                    self.logger.log(
                        "Could not find a suitable log file path, exiting.",
                        logtype="error")
                    sys.exit(0)
            else:
                self.logger.log(
                    "OS not recognized, log file path not selected, exiting.",
                    logtype="error")
                sys.exit(0)

        # Create specific parser objects
        if self.log_file_path:  # if log file path is valid
            if log_type == "apache":  # if Apache log file
                self.parser_obj = apache.ApacheParser(debug=debug,
                                                      window=window,
                                                      path=self.log_file_path)
            elif log_type == "nginx":  # if Nginx log file
                self.parser_obj = nginx.NginxParser(debug=debug,
                                                    window=window,
                                                    path=self.log_file_path)

        if self.log_file_path and self.parser_obj:  # if log file path is valid
            # Cross Site Scripting (XSS) Detection
            self.xss_obj = xss.CrossSite(debug=True)
            # SQL injection (SQLi) Detection
            self.sqli_obj = sqli.SQLi(debug=debug)
            # Local File Inclusion (LFI) Detection
            self.lfi_obj = lfi.LFI(debug=debug)
            # Web Shell Detection
            self.web_shell_obj = web_shell.WebShell(debug=debug)
            # Port Scan Detection
            self.port_scan_obj = port_scan.PortScan(debug=debug)
            # URL Fuzzer Detection
            self.fuzzer_obj = fuzzer.FuzzerDetect(debug=debug)
            # Spider / Web Crawler / Bad user agent
            self.spider_obj = spider.SpiderDetect(debug=debug)
            # DDoS Detection
            self.ddos_obj = ddos.DDoS(debug=debug)
            # UserFilter object
            self.user_filter_obj = user_filter.UserFilter(
                debug=debug, ip_list=ip_list, status_code=status_code)

    def run(self):
        """
        Start the ServerLog Monitor Engine.

        Args:
            None

        Raises:
            None

        Returns:
            None
        """
        while True:  # Run in an endless parent thread loop
            thread_pool = []  # Collection of the threads for this iteration

            # Parse the logfile
            data = self.parser_obj.parse()

            # Create multiple threads for various detection
            xss_thread = threading.Thread(target=self.xss_obj.detect_xss,
                                          args=(data, ))
            sqli_thread = threading.Thread(target=self.sqli_obj.detect_sqli,
                                           args=(data, ))
            lfi_thread = threading.Thread(target=self.lfi_obj.detect_lfi,
                                          args=(data, ))
            web_shell_thread = threading.Thread(
                target=self.web_shell_obj.detect_web_shell, args=(data, ))
            port_scan_thread = threading.Thread(
                target=self.port_scan_obj.detect_port_scan, args=(data, ))
            fuzzer_thread = threading.Thread(
                target=self.fuzzer_obj.detect_fuzzer, args=(data, ))
            spider_thread = threading.Thread(
                target=self.spider_obj.detect_spider, args=(data, ))
            ddos_thread = threading.Thread(target=self.ddos_obj.detect_ddos,
                                           args=(data, ))
            user_filter_thread = threading.Thread(
                target=self.user_filter_obj.filter_user_criteria,
                args=(data, ))

            # Add created threads to the thread pool
            thread_pool.append(xss_thread)
            thread_pool.append(sqli_thread)
            thread_pool.append(lfi_thread)
            thread_pool.append(web_shell_thread)
            thread_pool.append(port_scan_thread)
            thread_pool.append(fuzzer_thread)
            thread_pool.append(spider_thread)
            thread_pool.append(ddos_thread)
            thread_pool.append(user_filter_thread)

            # Start the thread process
            xss_thread.start()
            sqli_thread.start()
            lfi_thread.start()
            web_shell_thread.start()
            port_scan_thread.start()
            fuzzer_thread.start()
            spider_thread.start()
            ddos_thread.start()
            user_filter_thread.start()

            # Complete the thread execution
            for thread in thread_pool:
                thread.join()
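
A hedged usage sketch: the constructor arguments come from the example above, but the import path is inferred from the payload-file paths and may differ between SecureTea releases; the log file path is illustrative.

from securetea.lib.log_monitor.server_log.engine import Engine  # assumed module path

engine_obj = Engine(debug=True,
                    log_type="nginx",
                    log_file="/var/log/nginx/access.log",   # illustrative path
                    window=30)
engine_obj.run()   # parse the log and run every detection thread in a loop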
Example #22
class Ssrf(object):
    """Ssrf Class."""

    def __init__(self, test=False, debug=False):
        """
        Initialize Ssrf.

        Args:
            debug (bool): Log on terminal or not
            test (bool): Load payloads from the repository path instead of /etc/securetea

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(
            __name__,
            debug=debug
        )

        if test:
            # Path of file containing SSRF payloads
            self.PAYLOAD_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/ssrf.txt"
            # Path of file containing SSRF regex rules
            self.REGEX_FILE = "securetea/lib/log_monitor/server_log/rules/regex/ssrf.txt"
            # Path of the IP Rules
            self.IP_FILE = "securetea/lib/log_monitor/server_log/rules/payloads/ips.txt"

        else:
            # Path of file containing SSRF payloads
            self.PAYLOAD_FILE = "/etc/securetea/log_monitor/server_log/payloads/ssrf.txt"
            # Path of file containing SSRF regex rules
            self.REGEX_FILE = "/etc/securetea/log_monitor/server_log/regex/ssrf.txt"
            # Path of the IP Rules
            self.IP_FILE = "/etc/securetea/log_monitor/server_log/payloads/ips.txt"

        # Load SSRF payloads
        self.payloads = utils.open_file(self.PAYLOAD_FILE)
        # Load SSRF regex rules
        self.regex = utils.open_file(self.REGEX_FILE)
        # IPs
        self.ips = utils.open_file(self.IP_FILE)

        # Logged IP list
        self.logged_IP = list()

        # Initialize OSINT object
        self.osint_obj = OSINT(debug=debug)

    def detect_ssrf(self, data):
        """
        Detect possible Server Side Request Forgery (SSRF) attacks.

        Args:
            data (dict): Parsed log file data

        Raises:
            None

        Returns:
            None
        """
        for ip in data.keys():
            get_req = data[ip]["get"]
            last_time = data[ip]["ep_time"][0]
            # Extract all the URLs in the GET request path
            urls = re.findall(r"https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+", get_req[0])
            for url in urls:
                resolved_ip = utils.resolver(url)
                if resolved_ip:
                    if (self.rmatch(resolved_ip)):
                        if ip not in self.logged_IP:  # if not logged earlier
                            self.logged_IP.append(ip)
                            msg = "Possible SSRF detected From: " + str(ip) + \
                                  " on: " + str(utils.epoch_to_date(last_time))
                            self.logger.log(
                                msg,
                                logtype="warning"
                            )
                            utils.write_ip(str(ip))
                            # Generate CSV report using OSINT tools
                            self.osint_obj.perform_osint_scan(ip.strip(" "))
                            # Write malicious IP to file, to teach Firewall about the IP
                            write_mal_ip(ip.strip(" "))

                if (self.payload_match(url) or self.regex_match(get_req)):
                    if ip not in self.logged_IP:  # if not logged earlier
                        self.logged_IP.append(ip)
                        msg = "Possible SSRF detected from: " + str(ip) + \
                              " on: " + str(utils.epoch_to_date(last_time))
                        self.logger.log(msg, logtype="warning")
                        utils.write_ip(str(ip))
                        # Generate CSV report using OSINT tools
                        self.osint_obj.perform_osint_scan(ip.strip(" "))
                        # Write malicious IP to file, to teach Firewall about the IP
                        write_mal_ip(ip.strip(" "))


    def payload_match(self, url):
        """
        Match a URL parsed from a GET request against
        possible SSRF payloads.

        Args:
            url (str): URL on which to perform
                       payload string matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for payload in self.payloads:
            payload = payload.strip(" ").strip("\n")
            if (payload in url
                    or utils.uri_encode(payload) in url):
                return True


    def regex_match(self, get_req):
        """
        Match parsed GET requests against
        possible SSRF regex rules.

        Args:
            get_req (list): GET requests on which to perform
                            regex matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for req in get_req:
            for reg in self.regex:
                reg = reg.strip(" ").strip("\n")
                if re.findall(reg, req) != []:
                    return True


    def rmatch(self, ip):
        """
        Match a resolved IP against the list of
        IPs flagged for possible SSRF.

        Args:
            ip (str): IP on which to perform
                      string matching

        Raises:
            None

        Returns:
            TYPE: bool
        """
        for payload_ip in self.ips:
            payload_ip = payload_ip.strip(" ").strip("\n")
            if payload_ip in ip:
                return True
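
A standalone sketch of the URL extraction performed in detect_ssrf above; the GET request is illustrative.

import re

get_req = ["/fetch?target=http://169.254.169.254/latest/meta-data HTTP/1.1"]

urls = re.findall(r"https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+", get_req[0])
print(urls)  # ['http://169.254.169.254'] -> resolved and checked against ips.txt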
Example #23
    def __init__(self,
                 debug=False,
                 log_type=None,
                 log_file=None,
                 window=30,
                 ip_list=None,
                 status_code=None):
        """
        Initialize ServerLog Monitor Engine.

        Args:
            debug (bool): Log on terminal or not
            log_type (str): Type of log file (Apache, Nginx)
            log_file (str): Path of the log file
            window (int): Process log entries up to this many days old (default: 30)
            ip_list (list): List of IPs to filter / grab from the log file
            status_code (list): List of status codes to filter / grab from the log file

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        if log_type is None:
            self.logger.log("No server type selected, exiting.",
                            logtype="error")
            sys.exit(0)

        # Initialize log file path and parser object as None
        self.log_file_path = None
        self.parser_obj = None

        # OS to log file path mapping
        self.system_log_file_map = {
            "apache": {
                "debian": "/var/log/apache2/access.log",
                "fedora": "/var/log/httpd/access_log",
                "freebsd": "/var/log/httpd-access.log"
            },
            "nginx": {
                "debian": "/var/log/nginx/access.log"
            }
        }

        if log_file:
            self.log_file_path = str(log_file)
        else:
            os_name = utils.categorize_os()
            if os_name:
                try:
                    self.log_file_path = self.system_log_file_map[log_type][
                        os_name]
                except KeyError:
                    self.logger.log(
                        "Could not find a suitable log file path, exiting.",
                        logtype="error")
                    sys.exit(0)
            else:
                self.logger.log(
                    "OS not recognized, log file path not selected, exiting.",
                    logtype="error")
                sys.exit(0)

        # Create specific parser objects
        if self.log_file_path:  # if log file path is valid
            if log_type == "apache":  # if Apache log file
                self.parser_obj = apache.ApacheParser(debug=debug,
                                                      window=window,
                                                      path=self.log_file_path)
            elif log_type == "nginx":  # if Nginx log file
                self.parser_obj = nginx.NginxParser(debug=debug,
                                                    window=window,
                                                    path=self.log_file_path)

        if self.log_file_path and self.parser_obj:  # if log file path is valid
            # Cross Site Scripting (XSS) Detection
            self.xss_obj = xss.CrossSite(debug=True)
            # SQL injection (SQLi) Detection
            self.sqli_obj = sqli.SQLi(debug=debug)
            # Local File Inclusion (LFI) Detection
            self.lfi_obj = lfi.LFI(debug=debug)
            # Web Shell Detection
            self.web_shell_obj = web_shell.WebShell(debug=debug)
            # Port Scan Detection
            self.port_scan_obj = port_scan.PortScan(debug=debug)
            # URL Fuzzer Detection
            self.fuzzer_obj = fuzzer.FuzzerDetect(debug=debug)
            # Spider / Web Crawler / Bad user agent
            self.spider_obj = spider.SpiderDetect(debug=debug)
            # DDoS Detection
            self.ddos_obj = ddos.DDoS(debug=debug)
            # UserFilter object
            self.user_filter_obj = user_filter.UserFilter(
                debug=debug, ip_list=ip_list, status_code=status_code)
Example #24
    def __init__(self,
                 debug=False,
                 log_type=None,
                 log_file=None,
                 window=30,
                 ip_list=None,
                 status_code=None):
        """
        Initialize ServerLog Monitor Engine.

        Args:
            debug (bool): Log on terminal or not
            log_type (str): Type of log file (Apache, Nginx)
            log_file (str): Path of the log file
            window (int): Process log entries up to this many days old
            ip_list (str): List of IPs to filter
            status_code (str): List of status codes to filter

        Raises:
            None

        Returns:
            None
        """
        # Initialize logger
        self.logger = ServerLogger(__name__, debug=debug)

        # Check running as root or not
        if not utils.check_root():
            self.logger.log("Please start as root, exiting.", logtype="error")
            sys.exit(0)

        if ip_list:
            ip_list = utils.get_list(ip_list)

        if status_code:
            status_code = utils.get_list(status_code)

        # Check the variables
        if not log_file:  # covers both None and empty string
            log_file = None
        else:
            log_file = log_file.strip(" ")

        if not log_type:  # covers both None and empty string
            log_type = None
        else:
            log_type = log_type.strip(" ")

        if window == "":
            window = 30
        else:
            window = int(window)

        # Create Engine
        self.engine_obj = Engine(debug=debug,
                                 log_type=log_type,
                                 log_file=log_file,
                                 window=window,
                                 ip_list=ip_list,
                                 status_code=status_code)