# Note: relies on pickle, WINDOW, SCALES, breakTrainTest and the extractFeatures*
# helpers being imported/defined elsewhere in this module.
def make_observation_features(class_idx, file_name):
    try:
        with open(DATA_PATH + file_name, 'rb') as f:
            comm_up_down, upload_ports, download_ports = pickle.load(f)
    except Exception:
        # some capture files only contain the byte counters, without the port lists
        with open(DATA_PATH + file_name, 'rb') as f:
            comm_up_down = pickle.load(f)

    comm_train, comm_test = breakTrainTest(comm_up_down, oWnd=WINDOW)

    features_comm, oClass_comm = extractFeatures(comm_train, Class=class_idx)
    features_commS, oClass_commS = extractFeaturesSilence(comm_train,
                                                          Class=class_idx)
    features_commW, oClass_commW = extractFeaturesWavelet(comm_train,
                                                          SCALES,
                                                          Class=class_idx)

    return (features_comm, oClass_comm), (features_commS,
                                          oClass_commS), (features_commW,
                                                          oClass_commW)
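# A minimal usage sketch (an assumption, not part of the original module): the three
# feature tuples returned above can be stacked column-wise into one observation
# matrix per class, mirroring the np.hstack pattern used for the unknown data
# further down. The helper name is hypothetical; assumes numpy is imported as np.
def stack_observation_features(class_idx, file_name):
    (f_stat, o_class), (f_sil, _), (f_wav, _) = make_observation_features(class_idx, file_name)
    # concatenate statistical, silence and wavelet features for each window
    return np.hstack((f_stat, f_sil, f_wav)), o_class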
import sys, os
import pickle

import numpy as np

sys.path.append("..")

from classifier.utils.classify import extractFeatures, extractFeaturesWavelet, extractFeaturesSilence, breakData, \
    generate_name_from_file
from classifier.neuronalNetworks.classify_neuronalNetworks import classify_neuronalNetworks


DATA_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)), "classifier/data/")


if __name__ == '__main__':
    # unknown capture to classify
    file_name = "netflix_comm_record.bin"

    # load the recorded capture (byte counters plus upload/download port lists)
    with open(DATA_PATH + file_name, 'rb') as f:
        comm_up_down, upload_ports, download_ports = pickle.load(f)

    # break data
    break_data = breakData(comm_up_down)

    # extract features of the unknown break data
    features_data = extractFeatures(break_data)[0]
    features_dataS = extractFeaturesSilence(break_data)[0]
    features_dataW = extractFeaturesWavelet(break_data)[0]
    unknown_data_features = np.hstack((features_data, features_dataS, features_dataW))

    # classify the unknown data against each class (YouTube, Browsing and Mining)
    classify_neuronalNetworks(unknown_data_features, printing=True, result=generate_name_from_file(file_name))
Example #3
    def classify(pkt):
        global tcp_services, current_host_name, last_timestamp_classify

        # check whether this packet belongs to an already registered TCP service
        src_port = pkt["tcp.srcport"]
        dst_port = pkt["tcp.dstport"]

        # remember which port keys this service in tcp_services, so the entry can be updated later
        port = dst_port
        
        if src_port in tcp_services:
            # the packet belongs to an existing TCP session
            service = tcp_services[src_port]
            port = src_port
        elif dst_port in tcp_services:
            service = tcp_services[dst_port]
            port = dst_port
        else:
            # new TCP session: initialise its per-session state
            service = {
                "upload_bytes_counter": 0,        # bytes sent by the host in the current second
                "download_bytes_counter": 0,      # bytes received by the host in the current second
                "last_timestamp": 0,
                "index": 0,                       # number of packets seen for this session
                "data_bytes_counter": np.array([[0, 0]])  # per-second [download, upload] history
            }

        # end verify

        source = pkt["ip.src"]

        if service["index"] == 0:
            if source == current_host_name:
                service["upload_bytes_counter"] += int(pkt["length"])
            else:
                service["download_bytes_counter"] += int(pkt["length"])

            service["last_timestamp"] = pkt["sniff_time"]
        else:
            # index != 0
            timestamp_now = pkt["sniff_time"]
            timestamp_now = timestamp_now.replace(tzinfo=datetime.timezone.utc).timestamp()

            delta_time = ((datetime.datetime.utcfromtimestamp(int(timestamp_now))) - service["last_timestamp"]).total_seconds()

            if delta_time >= 1:  # at least one second has passed: flush the counters and pad the gap with zero seconds
                service["data_bytes_counter"] = np.append(service["data_bytes_counter"],
                                                          [[service["download_bytes_counter"],
                                                            service["upload_bytes_counter"]]], 0)

                for i in range(0, int(delta_time) - 1):
                    service["data_bytes_counter"] = np.append(service["data_bytes_counter"], [[0, 0]], 0)

                service["upload_bytes_counter"] = 0
                service["download_bytes_counter"] = 0

            elif int(service["last_timestamp"].replace(tzinfo=datetime.timezone.utc).timestamp()) == int(
                    timestamp_now):  # if it's the same timestamp, we have to increment
                source = pkt["ip.src"]

                if source == current_host_name:
                    service["upload_bytes_counter"] += int(pkt["length"])

                else:
                    service["download_bytes_counter"] += int(pkt["length"])

            service["last_timestamp"] = pkt["sniff_time"]

        # increment the per-session packet index

        service["index"] += 1

        # update tcp services
        tcp_services[port] = service

        """
        Update the empty seconds for each tcp service that didn't receive any byte
        """
        for port in tcp_services.keys():
            service = tcp_services[port]

            timestamp_now = pkt["sniff_time"]
            timestamp_now = timestamp_now.replace(tzinfo=datetime.timezone.utc).timestamp()

            delta_time = ((datetime.datetime.utcfromtimestamp(int(timestamp_now))) - service["last_timestamp"]).total_seconds()

            if delta_time >= 3:  # more than three seconds without traffic: flush the counters and pad the gap with zero seconds
                service["data_bytes_counter"] = np.append(service["data_bytes_counter"],
                                                          [[service["download_bytes_counter"],
                                                            service["upload_bytes_counter"]]], 0)

                for i in range(0, int(delta_time) - 1):
                    service["data_bytes_counter"] = np.append(service["data_bytes_counter"], [[0, 0]], 0)

                service["upload_bytes_counter"] = 0
                service["download_bytes_counter"] = 0
                service["last_timestamp"] = pkt["sniff_time"]

            tcp_services[port] = service
        """
        When the size_bytes get's some value it will discard  old bytes and only X most recent bytes will be take in account
        """
        delta_time = (datetime.datetime.now() - last_timestamp_classify).total_seconds()

        if delta_time >= 1:
            # classify at most once per second
            message = {}

            for port, service in tcp_services.items():
                if service["data_bytes_counter"].shape[0] >= WINDOW:

                    try:
                        # keep only the WINDOW most recent samples
                        data_bytes_counter = service["data_bytes_counter"][-WINDOW:, :]

                        break_data = breakData(data_bytes_counter, oWnd=WINDOW)

                        features_data = extractFeatures(break_data)[0]
                        features_dataS = extractFeaturesSilence(break_data)[0]
                        features_dataW = extractFeaturesWavelet(break_data)[0]

                        # based on distances
                        result = dict(classify_distances(features_data, features_dataS, features_dataW, result="YouTube"))

                        # message
                        sum_results = result["Other"] + result["Mining"]

                        if sum_results > 0:
                            result["Other"] = (result["Other"] / sum_results) * 100
                            result["Mining"] = (result["Mining"] / sum_results) * 100

                            # association with service port
                            message[port] = result
                    except Exception as e:
                        message[port] = "Window size with only zeros ({})".format(service["data_bytes_counter"].shape[0])
                        """
                        print("Error, Port: {} Shape {}".format(port, str(service["data_bytes_counter"].shape)))

                        # print traceback
                        exc_type, exc_value, exc_traceback = sys.exc_info()

                        print("*** print_tb:")
                        traceback.print_tb(exc_traceback, limit=1, file=sys.stdout)

                        print("*** print_exception:")
                        traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout)
                        """
                else:
                    message[port] = "Not enough data ({})".format(service["data_bytes_counter"].shape[0])

            print("\r{}".format(str(json.dumps(message, sort_keys=True, indent=4))))

            send_message = dict()
            send_message["ports"] = message
            send_message["host"] = current_host_name

            for itm in clients:
                itm.write_message(json.dumps(send_message))

            # change last timestamp
            last_timestamp_classify = datetime.datetime.now()
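
# --- Minimal setup sketch (an assumption, not part of the original code) ---
# classify() above relies on module-level state, on numpy/datetime/json and the
# classifier helpers being imported, and on a dict-like packet with tshark-style
# field names. The names and values below are illustrative only.
import datetime

tcp_services = {}                                 # port -> per-session counters
current_host_name = "192.168.1.10"                # hypothetical local host IP
last_timestamp_classify = datetime.datetime.now()
clients = []                                      # connected websocket-like clients

example_pkt = {
    "tcp.srcport": "443",
    "tcp.dstport": "51234",
    "ip.src": "192.168.1.10",
    "length": "1500",
    "sniff_time": datetime.datetime.utcnow(),     # naive datetime, treated as UTC by classify()
}
# classify(example_pkt)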