    def generate_models(self):
        """ Generate all the individual models. We are related with only one dataset and connection group. """
        # Get the group of connections from the id
        group_of_connections = __group_of_group_of_connections__.get_group(self.get_group_connection_id())

        # For each connection
        for connection in group_of_connections.get_connections():
            # Create its model. Remember that the connection id and the model id are the same 4-tuple
            model_id = connection.get_id()
            new_model = Model(model_id)
            # Set the constructor for this model. Each model has a specific way of constructing the states
            new_model.set_constructor(__modelsconstructors__.get_default_constructor())
            for flow in connection.get_flows():
                new_model.add_flow(flow)
            self.models[model_id] = new_model
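In generate_models above, each connection becomes one Model keyed by its 4-tuple id, gets the default constructor attached, and is fed all of the connection's flows. The standalone sketch below shows the same per-connection, dictionary-keyed pattern with stand-in names and data of my own; it is not the framework's real Model or connection API.

# A minimal, self-contained sketch of the pattern above: one model per
# connection id (the 4-tuple), each fed that connection's flows. FakeModel
# and the sample data are hypothetical stand-ins, not the framework's classes.
class FakeModel(object):
    def __init__(self, model_id):
        self.id = model_id
        self.flows = []

    def add_flow(self, flow):
        self.flows.append(flow)

connections = {
    '10.0.0.1-10.0.0.2-80-tcp': ['flow1', 'flow2'],
    '10.0.0.1-10.0.0.3-53-udp': ['flow3'],
}

models = {}
for conn_id, flows in connections.items():
    model = FakeModel(conn_id)   # the model id is the connection 4-tuple
    for flow in flows:
        model.add_flow(flow)
    models[conn_id] = model

print(len(models))  # 2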
Example #2
    def generate_group_of_models(self):
        if __datasets__.current:
            # Get the id for the current dataset
            dataset_id = __datasets__.current.get_id()
            # We should check that there is a group of connections already for this dataset
            if not __group_of_group_of_connections__.get_group(dataset_id):
                # There is no group of connections for this dataset yet, so ask the user to generate them first
                print_info(
                    'There were no connections for this dataset. Generate them first.'
                )
                return False

            # Get the group of connections these models are related to
            group_connection = __group_of_group_of_connections__.get_group(
                dataset_id)
            if group_connection:
                group_connection_id = group_connection.get_id()
            else:
                print_error(
                    'There are no connections for this dataset yet. Please generate them.'
                )
                return False

            # The id of this group of models is the id of the dataset + the id of the model constructor, because the same connections can be modeled by different constructors.
            group_of_models_id = str(dataset_id) + '-' + str(
                __modelsconstructors__.get_default_constructor().get_id())

            # Do we have the group of models for this id?
            try:
                group_of_models = self.group_of_models[group_of_models_id]
            except KeyError:
                # First time.
                # Create the group of models
                group_of_models = Group_of_Models(group_of_models_id)
                # Set the group of connections they will be using
                group_of_models.set_group_connection_id(group_connection_id)
                # Set the dataset id for this group of models
                group_of_models.set_dataset_id(dataset_id)
                # Store the model
                self.group_of_models[group_of_models_id] = group_of_models
                # Update the dataset to include this group of models
                __datasets__.current.add_group_of_models(group_of_models_id)

            # Generate the models
            group_of_models.generate_models()
        else:
            print_error('There is no dataset selected.')
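Two details of generate_group_of_models are worth isolating: the group id is composed as '<dataset_id>-<constructor_id>' so the same connections can be modeled by different constructors, and the group is created lazily with a try/except KeyError get-or-create. The short sketch below shows both with hypothetical values; the plain dictionary stands in for the real group-of-models storage.

# Sketch of the id composition and the lazy get-or-create used above.
# dataset_id, constructor_id and the dict are illustrative stand-ins.
dataset_id = 3
constructor_id = 1
group_of_models_id = str(dataset_id) + '-' + str(constructor_id)   # '3-1'

groups = {}
try:
    group = groups[group_of_models_id]
except KeyError:
    # First time: create and register the group
    group = {'id': group_of_models_id, 'dataset_id': dataset_id, 'models': {}}
    groups[group_of_models_id] = group

print(group_of_models_id in groups)  # True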
 def visualize_dataset(self, dataset_id, multiplier, filter):
     # Get the netflow file
     self.dataset = __datasets__.get_dataset(dataset_id)
     try:
         self.binetflow_file = self.dataset.get_file_type('binetflow')
     except AttributeError:
         print_error('That testing dataset does not seem to exist.')
         return False
     # Open the file
     try:
         file = open(self.binetflow_file.get_name(), 'r')
         self.setup_screen()
     except IOError:
         print_error('The binetflow file is not present in the system.')
         return False
     # construct filter
     self.construct_filter(filter)
     # Clean the previous models from the constructor
     __modelsconstructors__.get_default_constructor().clean_models()
     # Remove the header
     header_line = file.readline().strip()
     # Find the separation character
     self.find_separator(header_line)
     # Extract the columns names
     self.find_columns_names(header_line)
     line = ','.join(file.readline().strip().split(',')[:14])
     #logfile = open('log','w')
     while line:
         # Using our own extract_columns function makes this module more independent
         column_values = self.extract_columns_values(line)
         # Extract its 4-tuple. Find (or create) the tuple object
         tuple4 = column_values['SrcAddr']+'-'+column_values['DstAddr']+'-'+column_values['Dport']+'-'+column_values['Proto']
         # Get the _local_ model. We don't want to mess with the real models in the database, but we need the structure to get the state
         model = self.get_model(tuple4)
         # filter
         if not self.apply_filter(tuple4):
             line = ','.join(file.readline().strip().split(',')[:14])
             continue
         if not model:
             model = Model(tuple4)
             self.set_model(model)
             constructor_id = __modelsconstructors__.get_default_constructor().get_id()
             # Warning: here we depend on the models constructor
             model.set_constructor(__modelsconstructors__.get_constructor(constructor_id))
         flow = Flow(0) # Fake flow id
         flow.add_starttime(column_values['StartTime'])
         flow.add_duration(column_values['Dur'])
         flow.add_proto(column_values['Proto'])
         flow.add_scraddr(column_values['SrcAddr'])
         flow.add_dir(column_values['Dir'])
         flow.add_dstaddr(column_values['DstAddr'])
         flow.add_dport(column_values['Dport'])
         flow.add_state(column_values['State'])
         flow.add_stos(column_values['sTos'])
         flow.add_dtos(column_values['dTos'])
         flow.add_totpkts(column_values['TotPkts'])
         flow.add_totbytes(column_values['TotBytes'])
         try:
             flow.add_srcbytes(column_values['SrcBytes'])
         except KeyError:
             # It can happen that we don't have the SrcBytes column
             pass
         try:
             flow.add_srcUdata(column_values['srcUdata'])
         except KeyError:
             # It can happen that we don't have the srcUdata column
             pass
         try:
             flow.add_dstUdata(column_values['dstUdata'])
         except KeyError:
             # It can happen that we don't have the dstUdata column
             pass
         try:
             flow.add_label(column_values['Label'])
         except KeyError:
             # It can happen that we don't have the label column
             pass
         # Add the flow
         model.add_flow(flow)
         # As fast as we can or with some delay?
         if multiplier != 0.0 and multiplier != -1:
             # Wait some time between flows. 
             last_time = model.get_last_flow_time()
             current_time = datetime.strptime(column_values['StartTime'], '%Y/%m/%d %H:%M:%S.%f')
             if last_time:
                 diff = current_time - last_time
                 wait_time = diff.total_seconds()
                 time.sleep(wait_time / multiplier)
             model.add_last_flow_time(current_time)
             # Wait the necessary time. After the visualization
         elif multiplier == -1:
             time.sleep(0.1)
         # Visualize this model
         self.qscreen.put(model)
         line = ','.join(file.readline().strip().split(',')[:14])
     self.qscreen.put('Stop')
     file.close()
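The pacing branch near the end of visualize_dataset is the easiest part to misread, so here is a standalone sketch of just that logic, with made-up timestamps and a helper name of my own: a positive multiplier divides the real gap between consecutive flows, -1 inserts a fixed 0.1 s pause, and 0.0 replays as fast as possible.

# Self-contained sketch of the replay pacing above. replay_delay() and the
# timestamps are illustrative; only the branching mirrors the original code.
import time
from datetime import datetime

def replay_delay(last_time, current_time, multiplier):
    """Return how many seconds to sleep before showing the next flow."""
    if multiplier != 0.0 and multiplier != -1:
        if last_time is None:
            return 0.0
        gap = (current_time - last_time).total_seconds()
        return gap / multiplier
    elif multiplier == -1:
        return 0.1
    return 0.0

t1 = datetime.strptime('2013/08/12 10:00:00.000000', '%Y/%m/%d %H:%M:%S.%f')
t2 = datetime.strptime('2013/08/12 10:00:02.000000', '%Y/%m/%d %H:%M:%S.%f')
print(replay_delay(t1, t2, 2.0))      # 1.0: replay twice as fast as real time
time.sleep(replay_delay(t1, t2, -1))  # fixed 0.1 s pause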
    def cmd_models(self, *args):
        parser = argparse.ArgumentParser(prog="models", description="Manage models", epilog="Manage models")
        parser.add_argument('-s', '--listconstructors', action="store_true", help="List all the model constructors available.")
        parser.add_argument('-l', '--listgroups', action="store_true", help="List all the groups of models. If a dataset is selected, it only shows the models in that dataset.")
        parser.add_argument('-g', '--generate', action="store_true", help="Generate the models for the current dataset.")
        parser.add_argument('-d', '--deletegroup', metavar="group_model_id", help="Delete a group of models.")
        parser.add_argument('-D', '--deletemodel', metavar="group_model_id", help="Delete a model (4-tuple) from this group. With -D give the id of the group. Use -i to give the model id to delete (4-tuple) or -f to use a filter.")
        parser.add_argument('-i', '--modelid', metavar="model_id", help="Use this model id (4-tuple). Commonly used with -D.")
        parser.add_argument('-L', '--listmodels', metavar="group_model_id", help="List the models inside a group. You can use filters.")
        parser.add_argument('-C', '--countmodels', metavar="group_model_id", help="Count the models inside a group.")
        parser.add_argument('-f', '--filter', metavar="filter", nargs = '+', default="", help="Use this filter to work with models. You can use multiple filters separated by a space. Format: \"variable[=<>]value\". You can use the variables: statelength, name and labelname. For example: -f statelength>100 name=tcp. Another example: -f name=-tcp- labelname=Botnet")
        parser.add_argument('-H', '--histogram', metavar="group_model_id", help="Plot a histogram of the lengths of models states in the given id of group of models.")
        parser.add_argument('-N', '--delnote', metavar='group_model_id', help="Delete completely the note related with this model id. Use -i to give the model id to add the note to (4-tuple).")
        parser.add_argument('-n', '--editnote', metavar='group_model_id', help="Edit the note related with this model id. Use -i to give the model id to add the note to (4-tuple).")
        parser.add_argument('-o', '--listnotes', default=0, metavar='group_model_id', help="List the notes related with this model id. You can use -f with filters here.")
        parser.add_argument('-a', '--amountoflettersinstate', default=0, metavar='amount_of_letters', help="When used with -L, limit the maximum amount of letters in the state to show per line. Helps avoid dangerously long lines.")
        parser.add_argument('-c', '--constructor', metavar="constructor_id", type=int, help="Use this constructor for generating the new models. Use optionally with -g.")


        try:
            args = parser.parse_args(args)
        except:
            return

        # Subcommand to list the constructors
        if args.listconstructors:
            __modelsconstructors__.list_constructors()

        # Subcommand to list the models
        elif args.listgroups:
            __groupofgroupofmodels__.list_groups()

        # Subcommand to generate the models
        elif args.generate:
            if args.constructor is not None:
                if __modelsconstructors__.has_constructor_id(args.constructor):
                    constructor = int(args.constructor)
                else:
                    print_error('No such constructor id available.')
                    return False
            else:
                constructor = __modelsconstructors__.get_default_constructor().get_id()
            __groupofgroupofmodels__.generate_group_of_models(constructor)
            __database__.root._p_changed = True

        # Subcommand to delete the group of models of the current dataset
        elif args.deletegroup:
            __groupofgroupofmodels__.delete_group_of_models(args.deletegroup)
            __database__.root._p_changed = True

        # Subcommand to list the models in a group
        elif args.listmodels:
            __groupofgroupofmodels__.list_models_in_group(args.listmodels, args.filter, int(args.amountoflettersinstate))

        # Subcommand to delete a model from a group by id or filter
        elif args.deletemodel:
            # By id or filter?
            if args.modelid:
                # By id
                __groupofgroupofmodels__.delete_a_model_from_the_group_by_id(args.deletemodel, args.modelid)
                __database__.root._p_changed = True
            elif args.filter:
                # By filter
                __groupofgroupofmodels__.delete_a_model_from_the_group_by_filter(args.deletemodel, args.filter)
                __database__.root._p_changed = True
            else:
                print_error('You should provide the id of the model (4-tuple) with -i or a filter with -f')

        # Subcommand to count the amount of models
        elif args.countmodels:
            __groupofgroupofmodels__.count_models_in_group(args.countmodels, args.filter)
            __database__.root._p_changed = True

        # Subcommand to plot a histogram of state lengths
        elif args.histogram:
            __groupofgroupofmodels__.plot_histogram(args.histogram, args.filter)

        # Subcommand to edit the note of this model
        elif args.editnote:
            if args.modelid:
                __groupofgroupofmodels__.edit_note(args.editnote, args.modelid)
                __database__.root._p_changed = True
            else:
                print_error('You should give a model id also with -i.')
            
        # Subcommand to delete the note of this model
        elif args.delnote:
            if args.modelid:
                __groupofgroupofmodels__.del_note(args.delnote, args.modelid)
                __database__.root._p_changed = True
            else:
                print_error('You should give a model id also with -i.')

        # Subcommand to list the notes of this model
        elif args.listnotes:
            __groupofgroupofmodels__.list_notes(args.listnotes, args.filter)
            __database__.root._p_changed = True
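The handler above follows a simple pattern: it receives the raw command tokens, lets argparse turn them into options, and dispatches on whichever flag was given. The sketch below reproduces just that pattern with a couple of illustrative flags; the names and printed messages are mine, not the framework's, and parse_args() raising SystemExit on bad input (or -h) is why the call is guarded.

# Minimal sketch of the *args -> argparse -> flag dispatch pattern above.
# cmd_demo and its flags are illustrative, not the framework's real command.
import argparse

def cmd_demo(*args):
    parser = argparse.ArgumentParser(prog='models', description='Manage models')
    parser.add_argument('-l', '--listgroups', action='store_true',
                        help='List all the groups of models.')
    parser.add_argument('-L', '--listmodels', metavar='group_model_id',
                        help='List the models inside a group.')
    try:
        opts = parser.parse_args(args)
    except SystemExit:
        # argparse exits on bad input or -h; stay inside the shell instead
        return
    if opts.listgroups:
        print('listing groups...')
    elif opts.listmodels:
        print('listing models in group ' + opts.listmodels)

cmd_demo('-L', '15-1')   # prints: listing models in group 15-1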
Example #7
    def cmd_models(self, *args):
        parser = argparse.ArgumentParser(prog="models", description="Manage models", epilog="Manage models")
        parser.add_argument('-s', '--listconstructors', action="store_true", help="List all the model constructors available.")
        parser.add_argument('-l', '--listgroups', action="store_true", help="List all the groups of models. If a dataset is selected, it only shows the models in that dataset.")
        parser.add_argument('-g', '--generate', action="store_true", help="Generate the models for the current dataset.")
        parser.add_argument('-d', '--deletegroup', metavar="group_model_id", help="Delete a group of models.")
        parser.add_argument('-D', '--deletemodel', metavar="group_model_id", help="Delete a model (4-tuple) from this group. With -D give the id of the group. Use -i to give the model id to delete (4-tuple) or -f to use a filter.")
        parser.add_argument('-i', '--modelid', metavar="model_id", help="Use this model id (4-tuple). Commonly used with -D.")
        parser.add_argument('-L', '--listmodels', metavar="group_model_id", help="List the models inside a group. You can use filters.")
        parser.add_argument('-C', '--countmodels', metavar="group_model_id", help="Count the models inside a group.")
        parser.add_argument('-f', '--filter', metavar="filter", nargs = '+', default="", help="Use this filter to work with models. You can use multiple filters separated by a space. Format: \"variable[=<>]value\". You can use the variables: statelength, name and labelname. For example: -f statelength>100 name=tcp. Another example: -f name=-tcp- labelname=Botnet")
        parser.add_argument('-H', '--histogram', metavar="group_model_id", help="Plot a histogram of the lengths of models states in the given id of group of models.")
        parser.add_argument('-N', '--delnote', metavar='group_model_id', help="Delete completely the note related with this model id. Use -i to give the model id to add the note to (4-tuple).")
        parser.add_argument('-n', '--editnote', metavar='group_model_id', help="Edit the note related with this model id. Use -i to give the model id to add the note to (4-tuple).")
        parser.add_argument('-o', '--listnotes', default=0, metavar='group_model_id', help="List the notes related with this model id. You can use -f with filters here.")
        parser.add_argument('-a', '--amountoflettersinstate', default=0, metavar='amount_of_letters', help="When used with -L, limit the maximum amount of letters in the state to show per line. Helps avoid dangerously long lines.")
        parser.add_argument('-c', '--constructor', metavar="constructor_id", type=int, help="Use this constructor for generating the new models. Use optionally with -g.")
        parser.add_argument('-e', '--exportasciimodels', metavar="group_model_id", help="Export an ASCII list of all the connections, labels and letters in this group of models. Useful for external analysis.")


        try:
            args = parser.parse_args(args)
        except:
            return

        # Subcommand to list the constructors
        if args.listconstructors:
            __modelsconstructors__.list_constructors()

        # Subcommand to list the models
        elif args.listgroups:
            __groupofgroupofmodels__.list_groups()

        # Subcommand to generate the models
        elif args.generate:
            if args.constructor is not None:
                if __modelsconstructors__.has_constructor_id(args.constructor):
                    constructor = int(args.constructor)
                else:
                    print_error('No such constructor id available.')
                    return False
            else:
                constructor = __modelsconstructors__.get_default_constructor().get_id()
            __groupofgroupofmodels__.generate_group_of_models(constructor)
            __database__.root._p_changed = True

        # Subcommand to delete the group of models of the current dataset
        elif args.deletegroup:
            __groupofgroupofmodels__.delete_group_of_models(args.deletegroup)
            __database__.root._p_changed = True

        # Subcommand to list the models in a group
        elif args.listmodels:
            __groupofgroupofmodels__.list_models_in_group(args.listmodels, args.filter, int(args.amountoflettersinstate))

        # Subcommand to export the ascii of the models
        elif args.exportasciimodels:
            __groupofgroupofmodels__.export_models_in_group(args.exportasciimodels, args.filter)

        # Subcommand to delete a model from a group by id or filter
        elif args.deletemodel:
            # By id or filter?
            if args.modelid:
                # By id
                __groupofgroupofmodels__.delete_a_model_from_the_group_by_id(args.deletemodel, args.modelid)
                __database__.root._p_changed = True
            elif args.filter:
                # By filter
                __groupofgroupofmodels__.delete_a_model_from_the_group_by_filter(args.deletemodel, args.filter)
                __database__.root._p_changed = True
            else:
                print_error('You should provide the id of the model (4-tuple) with -i or a filter with -f')

        # Subcommand to count the amount of models
        elif args.countmodels:
            __groupofgroupofmodels__.count_models_in_group(args.countmodels, args.filter)
            __database__.root._p_changed = True

        # Subcommand to plot a histogram of state lengths
        elif args.histogram:
            __groupofgroupofmodels__.plot_histogram(args.histogram, args.filter)

        # Subcommand to edit the note of this model
        elif args.editnote:
            if args.modelid:
                __groupofgroupofmodels__.edit_note(args.editnote, args.modelid)
                __database__.root._p_changed = True
            else:
                print_error('You should give a model id also with -i.')
            
        # Subcommand to delete the note of this model
        elif args.delnote:
            if args.modelid:
                __groupofgroupofmodels__.del_note(args.delnote, args.modelid)
                __database__.root._p_changed = True
            else:
                print_error('You should give a model id also with -i.')

        # Subcommand to list the notes of this model
        elif args.listnotes:
            __groupofgroupofmodels__.list_notes(args.listnotes, args.filter)
            __database__.root._p_changed = True
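The only addition over the previous example is the -e/--exportasciimodels flag, which dumps the connections, labels and state letters of a group of models as plain text for external analysis. The snippet below is only a guess at what such an export could look like, written against hypothetical in-memory data; it is not the framework's real export_models_in_group.

# Hedged sketch of an ASCII export in the spirit of -e: one line per
# connection 4-tuple with its label and its state letters. The data and the
# output format are invented for illustration.
models = {
    '10.0.0.1-10.0.0.2-80-tcp': {'label': 'Botnet', 'state': 'aAbBcC'},
    '10.0.0.1-10.0.0.3-53-udp': {'label': 'Normal', 'state': 'rrRrR'},
}

with open('models.ascii.txt', 'w') as out:
    for tuple4, info in sorted(models.items()):
        out.write('{} | {} | {}\n'.format(tuple4, info['label'], info['state']))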