Example #1
    def initialize(self, training_values: list):
        self.__smoothing_factor = random.uniform(self.__bounds[0][0],
                                                 self.__bounds[0][1])
        self.__trend_factor = random.uniform(self.__bounds[1][0],
                                             self.__bounds[1][1])

        # initializing smoothed value
        self.__smoothed_value = sum(training_values) / len(training_values)

        # initializing trend value
        self.__trend_value = (training_values[-1] -
                              training_values[0]) / (len(training_values) - 1)

        forecasting_factors_init_guess = np.array(
            [self.__smoothing_factor, self.__trend_factor])

        loss_function = lambda x: self.__sse(training_values, x[0], x[1])

        forecasting_factors = optimize.minimize(loss_function,
                                                forecasting_factors_init_guess,
                                                method="SLSQP",
                                                bounds=self.__bounds)

        self.__smoothing_factor = forecasting_factors.x[0]
        self.__trend_factor = forecasting_factors.x[1]

        utils.colors(
            2, 50,
            "CUSUM DES Smoothing factor:     " + str(self.__smoothing_factor),
            8)
        utils.colors(
            3, 50,
            "CUSUM DES Trend factor:         " + str(self.__trend_factor), 8)
Example #2
    def __check_abrupt_decrease(self):
        last_val = self.__window[-1]

        if self.__attack_ending_cum > 0:
            self.__attack_ending_cum -= 1

        # checking abrupt decrease of new values
        if self.__delta == -1:
            self.__delta = last_val
        else:
            if self.__delta - last_val >= (self.__delta - self._smoothing.get_smoothed_value())/2:
                # got abrupt decrease

                self.__abrupt_decrease_cum += 1

                if self.__abrupt_decrease_cum == self.__stop_alarm_delay:
                    # detected end of attack
                    utils.colors(0, 0, "                                              ", 1)
                    utils.colors(0, 0, "Status: DoS attack ended", 83)
                    self._time_end = time.time()

                    self._under_attack = False
                    self.__abrupt_decrease_cum = 0
                    self.__delta = -1
                    self._test_statistic = 0
                    self._detection_threshold = 0
            else:
                # updating self.__delta with the exponentially weighted moving average method

                smoothing_factor = self._smoothing.get_smoothing_factor()
                self.__delta = smoothing_factor * self.__delta + (1 - smoothing_factor) * last_val

                if self.__abrupt_decrease_cum > 0:
                    self.__abrupt_decrease_cum -= 1
Example #3
    def _outlier_processing(self, value: float) -> bool:

        if value > self.__outlier_threshold:
            # outlier threshold exceeded
            # the value is an outlier

            if not self._under_attack:
                # not already under attack

                self.__outlier_cum += 1

                if self.__outlier_cum == self.__start_alarm_delay:
                    # reached the required number of consecutive outliers to raise the alarm

                    utils.colors(0, 0, "Status: DoS attack detected", 197)
                    self._time_start = time.time()

                    self.__outlier_cum -= 1
                    self.__z_values.append(self._z)
                    self._under_attack = True
                    self.__alarm_dur += 1

            else:
                self.__alarm_dur += 1

            return True

        if self.__outlier_cum > 0:
            self.__outlier_cum -= 1

        return False
Example #4
    def __init__(self,
                 source: str,
                 parametric=False,
                 time_interval=5,
                 threshold=0.65,
                 verbose=False):

        self._time_interval = time_interval
        self._source = source
        self._threshold = threshold
        self._anomalous_intervals_count = 0
        self._max_volume = 0
        self._min_volume = 0
        self._volumes = []
        self._time_start = 0
        self._time_end = 0

        utils.colors(0, 0, "Status: monitoring...", 5)

        if parametric:
            self._syn_cusum = SYNCusumDetector(threshold=threshold,
                                               verbose=verbose)
        else:
            self._syn_cusum = SYNNPCusumDetector(verbose=verbose)

        self._syn_counter = 0
        self._synack_counter = 0
Example #5
    def __check_ending_with_z(self):
        if self._z <= 0:
            self.__attack_ending_cum += 1

            if self.__attack_ending_cum == self.__stop_alarm_delay:
                # reached the required delay before declaring the attack ended
                # detected end of attack
                utils.colors(0, 0, "                                              ", 1)
                utils.colors(0, 0, "Status: DoS attack ended", 83)
                self._time_end = time.time()

                self._under_attack = False
                self._test_statistic = 0
                self.__attack_ending_cum = 0
                self._detection_threshold = 0

                if self.__alarm_dur < 6:
                    self.__alarm_dur = 0
Example #6
    def __write_data_thread(self):
        """
        Periodically calls self.__write_data() in a timer thread to write the data
        stored in the internal shared priority queue asynchronously
        """

        if not self.__stopped:
            self.__writing_thread = threading.Timer(self.interval,
                                                    self.__write_data_thread)
            self.__writing_thread.start()

        try:
            self.__write_data()
        except Exception:
            utils.colors(
                8, 0,
                "[Graph mode] - Error while writing to influxdb instance: --graph mode deactivated!",
                12)
            exit(1)
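Example #13 below stops this writer through plot.stop_writing_thread(). That method is not shown in these snippets; a minimal sketch of how it could look inside the same class, assuming it only needs to stop the rescheduling and cancel the pending timer, is:

    def stop_writing_thread(self):
        # stop __write_data_thread from rescheduling itself (assumed behaviour)
        self.__stopped = True
        # cancel the currently pending timer, if any
        if self.__writing_thread is not None:
            self.__writing_thread.cancel()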
Example #7
    def initialize(self, training_values):

        self.__smoothing_factor = random.uniform(self.__bounds[0][0],
                                                 self.__bounds[0][1])

        # initializing smoothed value
        self.__smoothed_value = sum(training_values) / len(training_values)

        forecasting_factors_init_guess = np.array([self.__smoothing_factor])

        loss_function = lambda x: self.__sse(training_values, x[0])

        forecasting_factors = optimize.minimize(loss_function,
                                                forecasting_factors_init_guess,
                                                method="SLSQP",
                                                bounds=self.__bounds)

        self.__smoothing_factor = forecasting_factors.x[0]
        utils.colors(
            1, 50,
            "Data SES Smoothing factor:     " + str(self.__smoothing_factor),
            8)
Example #8
    def _cusum_detection(self):

        if not self.__start_cusum:
            return

        if not self._under_attack:

            self._test_statistic = max(self._test_statistic + self._z, 0)

            if self._z > 0:
                # adjusting detection threshold

                if self._detection_threshold == 0:
                    self._detection_threshold = self._z * self.__start_alarm_delay
                else:
                    self._detection_threshold = self._detection_threshold / 2 + \
                                                self._z * self.__start_alarm_delay / 2

                if self._test_statistic >= self._detection_threshold:
                    # under attack

                    utils.colors(0, 0, "Status: DoS attack detected", 197)
                    self._under_attack = True
                    self.__alarm_dur += 1
            else:
                self._update_values()

        else:
            # under attack

            # checking the end of an attack through the sign of self._z

            if self.__alarm_dur < 6:
                self.__check_ending_with_z()
            else:
                self.__check_abrupt_decrease()

            self.__alarm_dur += 1
Example #9
    def _cusum_detection(self):
        self._test_statistic = max(self._test_statistic + self._z, 0)

        if not self._under_attack:
            # checking violation
            if self._test_statistic > self._detection_threshold:
                utils.colors(0, 0, "Status: DoS attack detected", 197)
                self._test_statistic = 0
                self._time_start = time.time()
                self._under_attack = True

        else:
            if self._test_statistic <= self._detection_threshold:
                # violation not detected

                utils.colors(0, 0, "                                              ", 1)
                utils.colors(0, 0, "Status: DoS attack ended", 83)
                self._time_end = time.time()

                self._test_statistic = 0
                self._under_attack = False
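Stripped of the curses output and object state, the rule applied here is the classic one-sided CUSUM: accumulate the deviations z, clamp the sum at zero, and raise an alarm once it exceeds the detection threshold. A self-contained sketch with hypothetical names and a fixed threshold:

def cusum_alarm(z_values, threshold):
    # return the index at which the cumulative sum first exceeds the threshold,
    # or None if no alarm is raised
    test_statistic = 0.0
    for index, z in enumerate(z_values):
        test_statistic = max(test_statistic + z, 0)
        if test_statistic > threshold:
            return index
    return None

# cusum_alarm([-0.5, 0.2, 1.1, 2.3, 3.0], threshold=5.0) returns 4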
Example #10
def main():
    if len(sys.argv) < 2:
        print("[!] Not Enough Arguments!")
        # TODO: Add usage
        sys.exit(0)

    parser = argparse.ArgumentParser()
    parser.add_argument("url", help="URL to test for LFI")
    parser.add_argument("-d", "--data", help="Use data:// technique", action="store_true")
    parser.add_argument("-i", "--input", help="Use input:// technique", action="store_true")
    parser.add_argument("-e", "--expect", help="Use expect:// technique", action="store_true")
    parser.add_argument("-f", "--filter", help="Use filter:// technique", action="store_true")
    parser.add_argument("-p", "--proc", help="Use /proc/self/environ technique", action="store_true")
    parser.add_argument("-a", "--access", help="Apache access logs technique", action="store_true")
    parser.add_argument("-ns", "--nostager", help="execute payload directly, do not use stager", action="store_true")
    parser.add_argument("-r", "--relative", help="use path traversal sequences for attack", action="store_true")
    parser.add_argument("--ssh", help="SSH auth log poisoning", action="store_true")
    parser.add_argument("-l", "--location", help="path to target file (access log, auth log, etc.)")
    parser.add_argument("--cookies", help="session cookies for authentication")

    args = parser.parse_args()

    url = args.url
    nostager = args.nostager
    relative = args.relative
    cookies = args.cookies

    parsed = urllib.parse.urlsplit(url)

    print(colors("[~] Checking Target: {0}".format(parsed.netloc), 93))

    # if ping(parsed.netloc):
    #     print(colors("[+] Target looks alive ", 92))
    # else:
    #     print(colors("[!] Target irresponsive ", 91))
    #     sys.exit(1)

    if not parsed.query:
        print(colors("[!] No GET parameter Provided ", 91))

    # TODO: Find a better way to do these checks
    if args.data:
        print(colors("[~] Testing with data:// ", 93))
        d = data.Data(url, nostager, cookies)
        d.execute_data()
    elif args.input:
        print(colors("[~] Testing with input:// ", 93))
        i = Input.Input(url, nostager, cookies)
        i.execute_input()
    elif args.expect:
        print(colors("[~] Testing with expect:// ", 93))
        e = Expect.Expect(url, nostager, cookies)
        e.execute_expect()
    elif args.proc:
        print(colors("[~] /proc/self/environ Technique Selected!", 93))
        i = proc.Environ(url, nostager, relative, cookies)
        i.execute_environ()
    elif args.access:
        print(colors("[~] Testing for Apache access.log poisoning", 93))
        if not args.location:
            print(colors("[~] Log Location Not Provided! Using Default", 93))
            l = '/var/log/apache2/access.log'
        else:
            l = args.location
        a = accesslog(url, l, nostager, relative, cookies)
        a.execute_logs()
    elif args.ssh:
        print(colors("[~] Testing for SSH log poisoning ", 93))
        if not args.location:
            print(colors("[~] Log Location Not Provided! Using Default", 93))
            l = '/var/log/auth.log'
        else:
            l = args.location
        a = sshlog.SSHLogs(url, l, relative, cookies)
        a.execute_ssh()
    elif args.filter:
        print(colors("[~] Testing with expect://", 93))
        f = Filter.Filter(url, cookies)
        f.execute_filter()
    else:
        print(colors("[!] Please select atleast one technique to test", 91))
        sys.exit(0)
Example #11
def signal_handler(signal, frame):
    print(colors('\n\nYou pressed Ctrl+C!', 91))
    sys.exit(0)
Example #12
        if not args.location:
            print(colors("[~] Log Location Not Provided! Using Default", 93))
            l = '/var/log/apache2/access.log'
        else:
            l = args.location
        a = accesslog(url, l, nostager, relative, cookies)
        a.execute_logs()
    elif args.ssh:
        print(colors("[~] Testing for SSH log poisoning ", 93))
        if not args.location:
            print(colors("[~] Log Location Not Provided! Using Default", 93))
            l = '/var/log/auth.log'
        else:
            l = args.location
        a = sshlog.SSHLogs(url, l, relative, cookies)
        a.execute_ssh()
    elif args.filter:
        print(colors("[~] Testing with expect://", 93))
        f = Filter.Filter(url, cookies)
        f.execute_filter()
    else:
        print(colors("[!] Please select atleast one technique to test", 91))
        sys.exit(0)


if __name__ == "__main__":
    signal.signal(signal.SIGINT, signal_handler)
    print(colors(figlet_format('Liffy v2.0', font='big'), 92))
    print("\n")
    main()
Example #13
def main():

    parser = argparse.ArgumentParser(description="DoSTect allows detecting SYN flooding attacks with parametric/non-parametric CUSUM change point detection")
    
    # Create an exclusive group: in this group only one parameter can be used at a time
    source_group = parser.add_mutually_exclusive_group(required=True)
    source_group.add_argument('-i', '--interface', action='store', dest="interface",
                        help="Network interface from which to perform live capture",
                        metavar="INTERFACE",
                        type=lambda x: is_valid_interface(parser, x))

    source_group.add_argument('-f', '--file', action='store', dest="file",
                        help="Packet capture file", metavar="FILE .pcap/.pcapng",
                        type=lambda x: is_valid_capture(parser, x))

    parser.add_argument('-s', '--slice', dest='interval', action='store', default=5.0,
                        help="Specify duration of time interval observation in seconds (default: 5)")

    parser.add_argument("-p", "--parametric", action='store', dest="param", type=bool, nargs='?',
                        const=True, default=False,
                        help="Flag to set CUSUM Parametric mode")

    parser.add_argument("-g", '--graph', action='store', dest="graph", type=bool, nargs='?',
                        const=True, default=False,
                        help="Activate influxDB data sender: requires --interface")

    parser.add_argument('-t', '--threshold', action='store', dest="threshold",
                        help="Threshold detection value for CUSUM Parametric mode", type=float)

    parser.add_argument('-a', '--address', action='store', dest="address",
                        help="IPv4 address of attacked machine for PCAP capture: requires --file", type=str)

    parser.add_argument("-v", "--verbose", action='store', dest="verbose", type=bool, nargs='?',
                        const=True, default=False,
                        help="Flag to set verbose output mode")
    
    # Arguments parser
    args = parser.parse_args()

    # Check that the time interval can be cast to int()
    try:
        int(args.interval)
    except (TypeError, ValueError):
        parser.error("%s is not a valid integer time interval!" % str(args.interval))


    # Check if graph mode and file capture are both selected
    if args.graph and args.file is not None:
        parser.error("--graph cannot be used with --file [FILE .pcap/.pcapng]")

    # Check --file / --address dependency: --address is required with --file and not valid with --interface
    if (args.file and args.address is None) or (args.interface and args.address is not None):
        parser.error("--file requires --address [ADDRESS], which cannot be used with --interface.")
    
    elif args.file is not None:
        # Check address format
        try:
            ipaddress.IPv4Address(args.address)
        except ValueError:
            parser.error("%s is not an IPv4 address!" % str(args.address))

    # Initialize to default value if None
    if args.threshold is None:
        args.threshold = 5.0

    # Initialize to Graph module if -g mode
    plot = None
    if args.graph:
        try:
            plot = Graph(os.path.join(os.path.dirname(__file__), 'config/influxdb/config.ini'))
        except Exception:
            utils.colors(7,0,"[Graph startup] - Error while connecting to influxdb instance: check your influxd service!", 12)
            sys.exit(1)

    # Set TERM for curses color support
    if os.getenv("TERM") is None:
        os.environ['TERM'] = "xterm-256color"

    # Start live capture if file is None (-i [INTERFACE] mode)
    if args.file is None:
        analyzer = LiveCatcher(
            source=str(args.interface),
            plot=plot,
            parametric=args.param,
            time_interval=int(args.interval),
            threshold=float(args.threshold),
            verbose=bool(args.verbose)
        )
    else:
        # Start analyzer from PCAP capture (-f [FILE] mode)
        analyzer = OfflineCatcher(
            source=str(args.file),
            ipv4_address=str(args.address),
            parametric=args.param,
            time_interval=int(args.interval),
            threshold=float(args.threshold),
            verbose=bool(args.verbose)
        )

    def sigint_handler(signum, frame):
        # stop the influxdb writer thread (if any), print the summary and exit
        if args.graph:
            plot.stop_writing_thread()

        print_statistics()

        sys.exit(0)

    def print_statistics():
        utils.colors(0,0,"                                                        ",5)
        utils.colors(0,0,"Status: monitoring ended",7)
        utils.colors(9,0,"Total intervals:           " + str(analyzer.get_total_intervals()),3)
        utils.colors(10,0,"Anomalous intervals count: " + str(analyzer.get_anomalous_intervals_count()),3)
        utils.colors(12,0,"Max volume reached:        " + str(analyzer.get_max_volume()),3)
        utils.colors(13,0,"Mean volume reached:       " + str(analyzer.get_mean_volume()),3)

        start_time = analyzer.get_time_start()
        end_time = analyzer.get_time_end()

        if args.file is None and start_time != 0 and end_time != 0:
            utils.colors(14,0,"Attack start detected at:       " + str(datetime.fromtimestamp(start_time)),12)
            utils.colors(15,0,"End attack detected at:         " + str(datetime.fromtimestamp(end_time)),12)

    # Register handler for SIGINT
    signal.signal(signal.SIGINT, sigint_handler)
    
    try:
        # Start analyzer
        analyzer.start()
    except (KeyboardInterrupt, SystemExit):
        sys.exit()

    print_statistics()
Example #14
    def print_statistics():
        utils.colors(0,0,"                                                        ",5)
        utils.colors(0,0,"Status: monitoring ended",7)
        utils.colors(9,0,"Total intervals:           " + str(analyzer.get_total_intervals()),3)
        utils.colors(10,0,"Anomalous intervals count: " + str(analyzer.get_anomalous_intervals_count()),3)
        utils.colors(12,0,"Max volume reached:        " + str(analyzer.get_max_volume()),3)
        utils.colors(13,0,"Mean volume reached:       " + str(analyzer.get_mean_volume()),3)

        start_time = analyzer.get_time_start()
        end_time = analyzer.get_time_end()

        if args.file is None and start_time != 0 and end_time != 0:
            utils.colors(14,0,"Attack start detected at:       " + str(datetime.fromtimestamp(start_time)),12)
            utils.colors(15,0,"End attack detected at:         " + str(datetime.fromtimestamp(end_time)),12)
Example #15
    def __init__(self, config_file, bucket_name="dostect", time_interval=1):
        """
        Called by traffic catching classes.
        Connects to influxdb2 through a given config.ini file.
        Provides a shared priority queue for the TCP volumes computed by the detection algorithms
        and every time_interval seconds turns these data into plot points written to bucket_name

        :param config_file: path to config file (.ini)
        :param bucket_name: influxdb bucket's name
        :param time_interval: time interval provided by input
        """

        self.interval = time_interval
        self.bucket_name = bucket_name
        self.org = ""
        self.write_api = None
        self.tcp_queue = []
        self._timer = None
        self.__stopped = False
        self.__writing_thread = None

        client = None
        result = 0
        try:
            # Load influx configuration from .ini file: retrieve HOST:PORT, ORG ID, ACCESS TOKEN
            client = influxdb_client.InfluxDBClient.from_config_file(
                config_file=config_file)

            self.org = client.org

            # Creating buckets API for buckets access
            bucket = client.buckets_api()

            # Checks if bucket bucket_name already exists, else create it
            try:  # an API call to InfluxDB Cloud raises an exception if the bucket does not exist
                result = bucket.find_bucket_by_name(self.bucket_name)
            except Exception:
                bucket.create_bucket(bucket_name=self.bucket_name)
                utils.colors(
                    8, 0,
                    "[Graph mode] - Bucket " + self.bucket_name + " created!",
                    3)

            # an API call to a local influxd service returns None if the bucket does not exist
            if result is None:
                bucket.create_bucket(bucket_name=self.bucket_name)
                utils.colors(
                    8, 0,
                    "[Graph mode] - Bucket " + self.bucket_name + " created!",
                    3)

        except Exception:
            raise Exception(
                "Error while connecting to influxdb instance: check your service or .ini file!"
            )

        # Creating write API for points creation
        self.write_api = client.write_api(write_options=SYNCHRONOUS)

        # Start periodical writing thread
        self.__write_data_thread()
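These snippets do not show how measurements enter tcp_queue. Under the assumption that the detectors push (timestamp, volume) tuples so the writer thread can emit points in chronological order, a producer could look roughly like this (hypothetical helper, not the project's actual code):

import heapq
import time

def enqueue_volume(graph, volume):
    # push a (timestamp, volume) tuple onto the shared priority queue;
    # the item format is an assumption
    heapq.heappush(graph.tcp_queue, (time.time(), volume))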
Example #16
        print(colors("[~] Testing for Apache access.log poisoning", 93))
        if not args.location:
            print(colors("[~] Log Location Not Provided! Using Default", 93))
            l = '/var/log/apache2/access.log'
        else:
            l = args.location
        a = accesslog(url, l, nostager, relative, cookies)
        a.execute_logs()
    elif args.ssh:
        print(colors("[~] Testing for SSH log poisoning ", 93))
        if not args.location:
            print(colors("[~] Log Location Not Provided! Using Default", 93))
            l = '/var/log/auth.log'
        else:
            l = args.location
        a = sshlog.SSHLogs(url, l, relative, cookies)
        a.execute_ssh()
    elif args.filter:
        print(colors("[~] Testing with expect://", 93))
        f = Filter.Filter(url, cookies)
        f.execute_filter()
    else:
        print(colors("[!] Please select atleast one technique to test", 91))
        sys.exit(0)


if __name__ == "__main__":
    signal.signal(signal.SIGINT, signal_handler)
    print(colors(figlet_format('LFI', font='big'), 92))
    print("\n")
    main()
Example #17
    def analyze(self, syn_count: int, *args):
        self.intervals += 1
        self.update(syn_count)

        utils.clean_line_end()
        utils.colors(1, 0,     "Interval number:     " + str(self.intervals), 8)
        utils.colors(2, 0,     "SYN volume:          " + str(self._test_statistic), 8)
        utils.colors(3, 0,     "SYN Threshold:       " + str(self._detection_threshold), 8)
        if self._verbose:
            utils.colors(4, 0, "SYN Value:           " + str(syn_count), 8)
            utils.colors(5, 0, "SYN Zeta:            " + str(self._z), 8)
            utils.colors(6, 0, "SYN Sigma:           " + str(self._sigma), 8)
            utils.colors(7, 0, "SYN Mu:              " + str(self._smoothing.get_smoothed_value()), 8)

        return self._test_statistic, self._detection_threshold