Example #1
    def test_prepare_traffic(self):
        # a Job without any data source attached cannot prepare its traffic
        job = Job()
        self.assertRaises(NoDataSourceException, job.prepare_traffic)

        # a data source alone is not enough; the job also needs a file path
        job = Job(data_source=FileDataSource())
        self.assertRaises(FilePathException, job.prepare_traffic)

        data_source = FileDataSource()
        parser = JobParser(data_source=data_source)
        reader = CSVReader()
        reader.parser = parser
        for job in parser.parse(
                path=os.path.join(os.path.dirname(gnmutils_tests.__file__),
                                  "data/c00-001-001/1/1-process.csv")):
            job.prepare_traffic()
        # sum the traffic entries over all processes of the parsed job
        count = 0
        for process in job.processes():
            count += len(process.traffic)
        self.assertEqual(count, 3155)
        self.assertEqual(job.db_id, "1")
        self.assertEqual(job.job_id, 4165419)
        self.assertEqual(job.gpid, 30726)
        self.assertEqual(job.uid, 14808)
        self.assertEqual(job.tme, 1405011331)
        self.assertEqual(job.exit_tme, 1405065581)
        self.assertEqual(job.exit_code, 0)
        self.assertEqual(len(job.faulty_nodes), 1)
        job.regenerate_tree()
Example #2
def _generate_network_statistics(kwargs):
    with ExceptionFrame():
        data_source = FileDataSource()
        path = kwargs.get("path", None)
        output_path = kwargs.get("output_path", None)
        for stats in data_source.network_statistics(path=path, stateful=True):
            data_source.write_network_statistics(data=stats, path=output_path)
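The helper above takes a single dict instead of keyword arguments, a pattern often used when the call is dispatched through a worker pool. A minimal invocation sketch; both directory paths are placeholders, not taken from the source:

    # Hypothetical call of the helper defined above; the paths are made up.
    _generate_network_statistics({
        "path": "/data/c00-001-001",
        "output_path": "/tmp/network_statistics",
    })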
Example #3
    def test_from_job(self):
        file_path = os.path.join(
            os.path.dirname(assess_tests.__file__),
            "data/c01-007-102/1/1-process.csv"
        )
        data_source = FileDataSource()
        for job in data_source.jobs(path=file_path):
            prototype = Prototype.from_job(job)
        self.assertIsNotNone(prototype)
        self.assertEqual(prototype.node_count(), 9109)

        last_tme = 0
        for node in prototype.nodes(order_first=True):
            self.assertTrue(last_tme <= node.tme)
            last_tme = node.tme
Example #4
    def __init__(self, path, data_source=None):
        self.path = path
        # fall back to a file-based data source when none is given explicitly
        self.data_source = data_source if data_source is not None else FileDataSource()
        self._logger = logging.getLogger('cache.prototypes')
        self.force_refresh = bool(
            os.environ.get('DISS_PROTOTYPE_CACHE_REFRESH', False))
        if self.force_refresh:
            self._logger.warning('Forcefully refreshing caches '
                                 '(enabled via $DISS_PROTOTYPE_CACHE_REFRESH)')
        self.preloaded_only = bool(
            os.environ.get('DISS_PROTOTYPE_CACHE_PRELOADED_ONLY', False))
        if self.preloaded_only:
            self._logger.warning(
                'Only working with preloaded caches '
                '(enabled via $DISS_PROTOTYPE_CACHE_PRELOADED_ONLY)')
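The snippet above is only a constructor; its enclosing class is not part of the example. A usage sketch under that caveat, where "PrototypeCache" and the cache path are hypothetical names and only FileDataSource and the two DISS_* environment variables come from the snippet:

    # "PrototypeCache" is a guessed name for the class owning the constructor above.
    cache = PrototypeCache("/data/prototypes", data_source=FileDataSource())
    # Exporting DISS_PROTOTYPE_CACHE_REFRESH with any non-empty value before start
    # makes the constructor log a warning and force-refresh the caches.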
Example #5
    def test_processes_in_order(self):
        data_source = FileDataSource()
        for job in data_source.jobs(path=self._file_path()):
            last_tme = 0
            last_pid = 0
            for process in job.processes_in_order():
                self.assertTrue(process.tme >= last_tme)
                if last_tme == process.tme:
                    # also check for pid
                    self.assertTrue(
                        process.pid > last_pid
                        or ((last_pid + process.pid) % 32768 < 500),
                        "%d vs %d" % (last_pid, process.pid))
                    last_pid = process.pid
                else:
                    last_pid = 0
                last_tme = process.tme
Example #6
    def test_parsing(self):
        data_source = FileDataSource()
        data_reader = CSVReader()
        parser = NetworkStatisticsParser(data_source=data_source)
        data_reader.parser = parser
        # nothing is returned by the NetworkStatisticsParser while parsing
        for _ in parser.parse(path=self.traffic_file_path()):
            pass
        for _ in parser.parse(path=self.process_file_path()):
            pass
        count = 0
        for data in parser.pop_data():
            for networkstats in data.values():
                count += networkstats.event_count
        self.assertEqual(count, 19998)
        parser.check_caches()
        parser.clear_caches()
        self.assertEqual(parser.data, {})
Example #7
    def test_processes(self):
        data_source = FileDataSource()
        for job in data_source.jobs(path=self._file_path()):
            tree = job.tree
            count = 0
            for node, depth in tree.walkDFS():
                count += 1
                # check pid order of children
                initial = 0
                last_tme = 0
                for process in node.children:
                    self.assertTrue(
                        process.value.pid >= initial
                        or (process.value.pid < initial
                            and last_tme < process.value.tme),
                        "%d: initial %d differs %d (%s)" %
                        (count, initial, process.value.pid, [
                            (child.value.pid, child.value.tme)
                            for child in node.children
                        ]))
                    initial = process.value.pid
                    last_tme = process.value.tme

    def setUp(self):
        self.dataSource = FileDataSource()
        self.path = os.path.join(os.path.dirname(gnmutils_tests.__file__),
                                 "data/c00-001-001")