Example no. 1 (rating: 0)
 def do_analyze(self, type=None):
     """Build the configured model and run streaming analysis on its input queue.

     Args:
         type: Model variant identifier forwarded to ``self.build_model``.
             (Name shadows the builtin but is kept for caller compatibility.)

     Side effects:
         Pushes analysis results onto ``self.output_queue`` via the model's
         ``analyze_stream``; logs the model type being run.
     """
     if K.backend() == "tensorflow":
         # TensorFlow needs an explicit fresh graph/session scope so the
         # model build and inference happen in an isolated graph.
         with tf.Session(graph=tf.Graph()) as sess:
             self._build_and_analyze(type)
     else:
         # Other Keras backends manage their own state; no session needed.
         self._build_and_analyze(type)

 def _build_and_analyze(self, type):
     """Shared pipeline: build model, wire the data stream, and analyze it.

     Extracted so the tensorflow and non-tensorflow branches of
     ``do_analyze`` do not duplicate the whole pipeline.
     """
     model, configuration = self.build_model(type=type)
     # First create the sources of data.
     data_helpers = DataHelpers(data_source=configuration['paths'],
                                label=None,
                                tokens_per_line=configuration['tokens_per_line'],
                                number_lines=configuration['number_lines'],
                                samples_per_batch=configuration['samples_per_batch'],
                                seed=configuration['seed'])
     # Get the (infinite) stream of vectorized input batches.
     online_generator = data_helpers.get_data_stream(configuration['vocabulary'],
                                                     configuration['input_queue'])
     logging.info("Convolutional intrusion detection: %s" % type)
     return model.analyze_stream(online_generator, self.output_queue)
Example no. 2 (rating: 0)
 def analyze_stream(self, data_source, max_length, n_gram, output_queue):
     """Continuously predict over batches from *data_source*, publishing results.

     Args:
         data_source: Source handed to ``DataHelpers`` and reused as the
             input queue for ``get_data_stream``.
         max_length: Tokens per line passed to ``DataHelpers``.
         n_gram: Number of lines per sample passed to ``DataHelpers``.
         output_queue: Queue that receives each batch's prediction result;
             also stored on ``self.output_queue``.

     The loop runs until the data stream is exhausted. The original code used
     ``while True: next(...)``, which leaked an unhandled ``StopIteration``
     when the generator ended; iterating with ``for`` consumes the same
     batches but terminates cleanly instead.
     """
     self.output_queue = output_queue
     data_helpers = DataHelpers(data_source,
                                None,
                                max_length,
                                n_gram,
                                samples_per_batch=None,
                                seed=20)
     data_generator = data_helpers.get_data_stream(self.vocabulary,
                                                   data_source)
     for data in data_generator:
         result = self.model.predict(data)
         self.output_queue.put(result)