Code Example #1
    def setUp(self):
        # First, create an instance of the Testbed class.
        self.testbed = TestbedWithFiles()
        # Then activate the testbed, which prepares the service stubs for use.
        self.testbed.activate()
        # Next, declare which service stubs you want to use.
        self.testbed.init_blobstore_stub(self.blobstore_dir)

        self.parser = ParseGCLog()
        self.csv_writer = BlobResultWriter()
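
The TestbedWithFiles class used above is not defined in this snippet. Given that init_blobstore_stub is handed a directory, it is presumably a small extension of the App Engine testbed that backs the blobstore stub with on-disk storage. The following is only a minimal sketch of such a helper, assuming the legacy Python 2 App Engine SDK modules (file_blob_storage, blobstore_stub); it is not taken from gc-analyser itself:

# Hypothetical sketch, not the actual gc-analyser implementation.
# Assumes the legacy Python 2 App Engine SDK is on sys.path.
from google.appengine.api.blobstore import blobstore_stub, file_blob_storage
from google.appengine.ext import testbed


class TestbedWithFiles(testbed.Testbed):
    """Testbed whose blobstore stub persists blobs under a local directory."""

    def init_blobstore_stub(self, blobstore_dir):
        # Back the blobstore stub with files under blobstore_dir so tests
        # can inspect (and later delete) the written blobs.
        blob_storage = file_blob_storage.FileBlobStorage(
            blobstore_dir, testbed.DEFAULT_APP_ID)
        blob_stub = blobstore_stub.BlobstoreServiceStub(blob_storage)
        self._register_stub(testbed.BLOBSTORE_SERVICE_NAME, blob_stub)
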
Code Example #2
class ParseGCLogTest(unittest.TestCase):
    """Test cases for writing parsed GC data to CSV files"""
    
    path = os.path.dirname(os.path.abspath(__file__)) + "/"
    result_file = path + "results.csv"
    sample_file = path + "gc-sample.log"

    def setUp(self):
        self.parser = ParseGCLog()
        self.csv_writer = FileResultWriter()
        
    def tearDown(self):
        if os.path.isfile(self.result_file):
            os.remove(self.result_file)

    def test_parse_file(self):
        result = self.parser.parse_file(self.sample_file)
        self.assertEqual(len(result), 7)

    def test_generate_memory_csv(self):
        expected_file = self.path + "expected_mem.csv"
        result = self.parser.parse_file(self.sample_file)
        graph.generate_graph(None, 
            graph.YG_GC_MEMORY, 
            result, 
            self.csv_writer, 
            False, 
            self.result_file)
        self.assertTrue(filecmp.cmp(self.result_file, expected_file))

    def test_generate_gc_reclaimed_csv(self):
        expected_file = self.path + "expected_reclaimed.csv"
        result = self.parser.parse_file(self.sample_file)
        graph.generate_graph(None, 
            graph.MEMORY_RECLAIMED, 
            result, 
            self.csv_writer, 
            False, 
            self.result_file)
        self.assertTrue(filecmp.cmp(self.result_file, expected_file))
        
    def test_generate_gc_duration_csv(self):
        expected_file = self.path + "expected_duration.csv"
        result = self.parser.parse_file(self.sample_file)
        graph.generate_graph(None, 
            graph.GC_DURATION, 
            result, 
            self.csv_writer, 
            False, 
            self.result_file)
        self.assertTrue(filecmp.cmp(self.result_file, expected_file))
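
FileResultWriter itself is not shown in this example. From the call to graph.generate_graph above it appears to be a thin writer that puts the generated CSV text into the target file; the interface below, in particular the write_results method name, is an assumption for illustration and not the real gc-analyser class:

# Illustrative sketch only; the real FileResultWriter interface is not
# shown in these snippets, so the method name here is assumed.
class FileResultWriter(object):
    """Writes generated CSV data to a file on the local filesystem."""

    def write_results(self, data, filename):
        with open(filename, 'w') as output_file:
            output_file.write(data)
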
Code Example #3
class ParseGCLogTest(unittest.TestCase):
    """Test cases for writing parsed GC data to CSV files"""

    path = os.path.dirname(os.path.abspath(__file__)) + "/"
    result_file = path + "results.csv"
    sample_file = path + "gc-sample.log"

    def setUp(self):
        self.parser = ParseGCLog()
        self.csv_writer = FileResultWriter()

    def tearDown(self):
        if os.path.isfile(self.result_file):
            os.remove(self.result_file)

    def test_parse_file(self):
        result = self.parser.parse_file(self.sample_file)
        self.assertEqual(len(result), 7)

    def test_generate_memory_csv(self):
        expected_file = self.path + "expected_mem.csv"
        result = self.parser.parse_file(self.sample_file)
        graph.generate_graph(None, graph.YG_GC_MEMORY, result, self.csv_writer,
                             False, self.result_file)
        self.assertTrue(filecmp.cmp(self.result_file, expected_file))

    def test_generate_gc_reclaimed_csv(self):
        expected_file = self.path + "expected_reclaimed.csv"
        result = self.parser.parse_file(self.sample_file)
        graph.generate_graph(None, graph.MEMORY_RECLAIMED, result,
                             self.csv_writer, False, self.result_file)
        self.assertTrue(filecmp.cmp(self.result_file, expected_file))

    def test_generate_gc_duration_csv(self):
        expected_file = self.path + "expected_duration.csv"
        result = self.parser.parse_file(self.sample_file)
        graph.generate_graph(None, graph.GC_DURATION, result, self.csv_writer,
                             False, self.result_file)
        self.assertTrue(filecmp.cmp(self.result_file, expected_file))
Code Example #4
File: gc-analyser.py  Project: romaicus/gc-analyser
    def post(self):
        user = users.get_current_user()

        # We use app.yaml to configure overall authentication
        if not validate_user(user.email()):
            self.redirect(users.create_login_url(self.request.uri))
            return

        start = time.time()

        #file = self.request.body_file
        file = self.request.params["gclog"].file

        log_key = LogData(
            filename=self.request.POST["gclog"].filename,
            notes=self.request.get("notes")).put()

        parser = ParseGCLog()
        gc_results = parser.parse_data(file)

        if len(gc_results) > 0:

            # Generate summary stats for results page
            summary_stats = SummaryStats(gc_results).stats

            # persist gc data - too slow at present with large datasets
            gc_datastore.store_data(log_key, gc_results)

            # persist all CSV data we generate to the store so we 
            # won't have to regenerate later
            blob_writer = BlobResultWriter()

            results_csv_key = graph.generate_cached_graph(log_key, 
                graph.RAW_CSV_DATA,
                gc_results, 
                blob_writer)

            yg_memory_blob_key = graph.generate_cached_graph(log_key, 
                graph.YG_GC_MEMORY, 
                gc_results, 
                blob_writer)

            full_memory_blob_key = graph.generate_cached_graph(log_key, 
                graph.FULL_GC_MEMORY, 
                gc_results, 
                blob_writer)

            gc_duration_blob_key = graph.generate_cached_graph(log_key, 
                graph.GC_DURATION, 
                gc_results, 
                blob_writer)
            
            gc_reclaimed_blob_key = graph.generate_cached_graph(log_key, 
                graph.MEMORY_RECLAIMED, 
                gc_results, 
                blob_writer)

            memory_util_post_blob_key = graph.generate_cached_graph(log_key, 
                graph.MEMORY_UTIL_POST, 
                gc_results, 
                blob_writer)
            
            duration = time.time() - start

            # Pass the keys to our results template; the data itself will be
            # fetched later via a separate request using each key
            template_values = {
                'user': user,
                'logout': users.create_logout_url("/"),
                'duration': duration,
                'name': '/uploads',
                'results_key': str(results_csv_key),
                'summary_stats': summary_stats,
                'gc_results': gc_results,
                'yg_memory_key': str(yg_memory_blob_key),
                'full_memory_key': str(full_memory_blob_key),
                'gc_duration_key': str(gc_duration_blob_key),
                'gc_reclaimed_key': str(gc_reclaimed_blob_key),
                'memory_util_post_key': str(memory_util_post_blob_key)
            }

            template = jinja_environment.get_template(
                'static/templates/results.html')

        else:
            template_values = {
                'user': user,
                'logout': users.create_logout_url("/")
            }

            template = jinja_environment.get_template(
                'static/templates/error.html')

        self.response.out.write(template.render(template_values))
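
The handler above only hands the blob keys to the results template. In a typical App Engine app the cached CSVs would then be served back from those keys by a BlobstoreDownloadHandler; the handler name and URL mapping below are an illustrative assumption, not code from gc-analyser:

# Illustrative sketch of serving a cached CSV blob by key; the handler
# name and route are assumptions, not part of gc-analyser.
import urllib

from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers


class ServeCsvHandler(blobstore_handlers.BlobstoreDownloadHandler):
    def get(self, resource):
        # 'resource' is the URL-encoded blob key passed to the template,
        # e.g. results_key or yg_memory_key.
        blob_key = str(urllib.unquote(resource))
        if blobstore.BlobInfo.get(blob_key) is None:
            self.error(404)
        else:
            self.send_blob(blob_key, save_as=True)
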
Code Example #5
 def setUp(self):
     self.parser = ParseGCLog()
     self.csv_writer = FileResultWriter()
Code Example #6
class ParseGCBlobTest(unittest.TestCase):

    path = os.path.dirname(os.path.abspath(__file__)) + "/"
    blobstore_dir = path + "testbed.blobstore"
    sample_file = path + "gc-sample.log"

    def setUp(self):
        # First, create an instance of the Testbed class.
        self.testbed = TestbedWithFiles()
        # Then activate the testbed, which prepares the service stubs for use.
        self.testbed.activate()
        # Next, declare which service stubs you want to use.
        self.testbed.init_blobstore_stub(self.blobstore_dir)

        self.parser = ParseGCLog()
        self.csv_writer = BlobResultWriter()

    def tearDown(self):
        self.testbed.deactivate()
        if os.path.isdir(self.blobstore_dir):
            shutil.rmtree(self.blobstore_dir)

    def _load_file_data(self, filename):
        file = open(filename, 'r')
        data = file.read()
        file.close()
        return data

    def _load_blob_data(self, blob_key):
        blob_info = blobstore.BlobInfo.get(blob_key)
        blob_reader = blob_info.open()
        data = blob_reader.read()
        blob_reader.close()
        return data

    def test_generate_memory_csv(self):
        expected_data = self._load_file_data(self.path + "expected_mem.csv")
        gc_data = self.parser.parse_file(self.sample_file)
        log_key = LogData(
            filename="memory.tmp",
            notes="notes").put()
        blob_key = graph.generate_cached_graph(log_key, 
            graph.YG_GC_MEMORY, 
            gc_data, 
            self.csv_writer)
        result_data = self._load_blob_data(blob_key)
        self.assertEqual(result_data, expected_data)

    def test_generate_gc_reclaimed_csv(self):
        expected_data = self._load_file_data(self.path + "expected_reclaimed.csv")
        gc_data = self.parser.parse_file(self.sample_file)
        log_key = LogData(
            filename="reclaimed.tmp",
            notes="notes").put()
        blob_key = graph.generate_cached_graph(log_key, 
            graph.MEMORY_RECLAIMED, 
            gc_data, 
            self.csv_writer)
        result_data = self._load_blob_data(blob_key)
        self.assertEqual(result_data, expected_data)

    def test_generate_gc_duration_csv(self):
        expected_data = self._load_file_data(self.path + "expected_duration.csv")
        gc_data = self.parser.parse_file(self.sample_file)
        log_key = LogData(
            filename="duration.tmp",
            notes="notes").put()
        blob_key = graph.generate_cached_graph(log_key, 
            graph.GC_DURATION, 
            gc_data, 
            self.csv_writer)
        result_data = self._load_blob_data(blob_key)
        self.assertEqual(result_data, expected_data)

    def test_gc_datastore(self):
        """Test storage & retrival of GC entries in datastore"""
        yg_gc = parsegc.generate_yg_gc_entry(
            "100.25",
            "100.25",
            "ParNew",
            "3",
            "2",
            "8",
            "0.12345",
            "8192",
            "5120",
            "16384",
            "3.12345",
            "1.5",
            "2.0",
            "3.1")

        full_gc = parsegc.generate_full_gc_entry(
            "200.5",
            "200.5",
            "Tenured",
            "20",
            "10",
            "40",
            "0.23456",
            "8192",
            "5120",
            "16384",
            "200",
            "100",
            "400",
            "3.1234",
            "1.9",
            "0.05",
            "3.11")

        # Items should be written to & returned from the store
        # ordered by timestamp
        gc_data = [yg_gc, full_gc]

        log_key = LogData(
            filename="test_gc_datastore.tmp",
            notes="notes").put()

        gc_datastore.store_data(log_key, gc_data)

        results = gc_datastore.get_data(log_key)

        self.assertEqual(results, gc_data)
        
        # Uncomment below to help debug failures
        """
Code Example #7
File: parsegc_test.py  Project: conor10/gc-analyser
 def setUp(self):
     self.parser = ParseGCLog()
Code Example #8
File: parsegc_test.py  Project: conor10/gc-analyser
class ParseGCTest(unittest.TestCase):
    """GC entry parsing test cases"""

    def setUp(self):
        self.parser = ParseGCLog()

    def test_yg_copy_old_msc_parse(self):
        """Single threaded YG copy collector with serial garbage collector

        -XX:+UseSerialGC
        """
        result = self.parser.parse(SERIAL_ENTRY1)
        expected = parsegc.generate_yg_gc_entry(
            "47.100",
            "47.100",
            "DefNew",
            "25472",
            "1143",
            "25472",
            "0.0103151",
            "66774",
            "45257",
            "81968",
            "0.0103716",
            "0.01",
            "0.00",
            "0.01",
        )
        self.assertEqual(result, expected)

        yg_result2 = self.parser.parse(SERIAL_YG1)
        yg_expected2 = parsegc.generate_yg_gc_entry(
            "1.321",
            "1.321",
            "DefNew",
            "80256",
            "9984",
            "90240",
            "0.2542700",
            "200471",
            "200470",
            "290600",
            "0.2543095",
            "0.22",
            "0.04",
            "0.26",
        )
        self.assertEqual(yg_result2, yg_expected2)

        # TODO SERIAL_YG2 entry

        full_result = self.parser.parse(SERIAL_FULL)
        full_expected = parsegc.generate_full_gc_entry(
            "26.256",
            "26.256",
            "Tenured",
            "349568",
            "349568",
            "349568",
            "1.3431030",
            "506815",
            "506815",
            "506816",
            "4607",
            "4607",
            "21248",
            "1.3431456",
            "1.31",
            "0.03",
            "1.34",
        )
        self.assertEqual(full_result, full_expected)

        system_result = self.parser.parse(SERIAL_SYSTEM)
        system_expected = parsegc.generate_full_gc_entry(
            "11.576",
            "11.576",
            "Tenured",
            "0",
            "336",
            "43712",
            "0.0123789",
            "1747",
            "336",
            "63360",
            "4612",
            "4612",
            "21248",
            "0.0132219",
            "0.01",
            "0.00",
            "0.01",
            "System",
        )
        self.assertEqual(system_result, system_expected)

    def test_young_ps_old_ps_parse(self):
        """Multi-threaded YG collector with throughput collector

        -XX:+UseParallelGC, auto-enabled with -XX:+UseParallelOldGC
        """
        result = self.parser.parse(PARALLEL_ENTRY1)
        expected = parsegc.generate_yg_gc_entry(
            "2.590",
            None,
            "PSYoungGen",
            "32768",
            "26736",
            "57344",
            None,
            "82018",
            "75986",
            "140416",
            "0.0292595",
            "0.08",
            "0.02",
            "0.03",
        )
        self.assertEqual(result, expected)

    def test_yg_parnew_old_msc_parse(self):
        """Multi-threaded YG collector with old MarkSweepCompact collector

        -XX:+UseParNewGC
        """
        yg_result = self.parser.parse(PARNEW_MSC_YG2)
        yg_expected = parsegc.generate_yg_gc_entry(
            "1.025",
            "1.025",
            "ParNew",
            "35840",
            "4480",
            "40320",
            "0.0930836",
            "89536",
            "88976",
            "129816",
            "0.0931276",
            "0.25",
            "0.03",
            "0.10",
        )
        self.assertEqual(yg_result, yg_expected)

        full_result = self.parser.parse(PARNEW_MSC_FULL)
        full_expected = parsegc.generate_full_gc_entry(
            "32.438",
            "32.438",
            "Tenured",
            "349567",
            "349567",
            "349568",
            "1.6792855",
            "506815",
            "506815",
            "506816",
            "4574",
            "4574",
            "21248",
            "1.6793288",
            "1.62",
            "0.05",
            "1.68",
        )
        self.assertEqual(full_result, full_expected)

        system_result = self.parser.parse(PARNEW_MSC_SYSTEM)
        system_expected = parsegc.generate_full_gc_entry(
            "11.033",
            "11.033",
            "Tenured",
            "0",
            "336",
            "43712",
            "0.0099557",
            "1747",
            "336",
            "63360",
            "4612",
            "4612",
            "21248",
            "0.0108454",
            "0.00",
            "0.00",
            "0.01",
            "System",
        )
        self.assertEqual(system_result, system_expected)

    def test_yg_parnew_old_cms_parse(self):
        """Multi-threaded YG collector with concurrent old generation 
        collector (CMS)

        -XX:+UseParNewGC, auto-enabled with -XX:+UseConcMarkSweepGC 
        """
        result = self.parser.parse(PAR_NEW_ENTRY1)
        expected = parsegc.generate_yg_gc_entry(
            "29.063",
            "29.063",
            "ParNew",
            "471872",
            "50601",
            "471872",
            "0.1122560",
            "2294220",
            "1911156",
            "4141888",
            "0.1127720",
            "2.47",
            "0.09",
            "0.12",
        )
        self.assertEqual(result, expected)

        yg_result = self.parser.parse(PARNEW_CMS_YG1)
        yg_expected = parsegc.generate_yg_gc_entry(
            "2.225",
            "2.225",
            "ParNew",
            "19134",
            "2110",
            "19136",
            "0.0619736",
            "395443",
            "395442",
            "412864",
            "0.0620169",
            "0.21",
            "0.01",
            "0.06",
        )
        self.assertEqual(yg_result, yg_expected)

        full_result = self.parser.parse(PARNEW_CMS_FULL)
        full_expected = parsegc.generate_full_gc_entry(
            "12.850",
            "12.850",
            "CMS",
            "458751",
            "458751",
            "458752",
            "2.2371750",
            "517759",
            "517722",
            "517760",
            "4619",
            "4609",
            "21248",
            "2.2372395",
            "2.17",
            "0.05",
            "2.23",
        )
        self.assertEqual(full_result, full_expected)

        system_result = self.parser.parse(PARNEW_CMS_SYSTEM)
        system_expected = parsegc.generate_full_gc_entry(
            "10.160",
            "10.479",
            "CMS",
            "0",
            "340",
            "63872",
            "0.0206751",
            "1735",
            "340",
            "83008",
            "4614",
            "4612",
            "21248",
            "0.0207745",
            "0.02",
            "0.00",
            "0.34",
            "System",
        )
        self.assertEqual(system_result, system_expected)

    def test_yg_copy_old_cms(self):
        """Young copy collector with old ConcurrentMarkSweep

        -XX:+UseConcMarkSweepGC -XX:-UseParNewGC
        """
        yg_result = self.parser.parse(COPY_CMS_YG1)
        yg_expected = parsegc.generate_yg_gc_entry(
            "1.438",
            "1.438",
            "DefNew",
            "19136",
            "2111",
            "19136",
            "0.1023744",
            "191163",
            "191162",
            "208192",
            "0.1024165",
            "0.10",
            "0.01",
            "0.10",
        )
        self.assertEqual(yg_result, yg_expected)

        full_result = self.parser.parse(COPY_CMS_FULL)
        full_expected = parsegc.generate_full_gc_entry(
            "6.497",
            "6.497",
            "CMS",
            "503039",
            "503040",
            "503040",
            "4.9805391",
            "522175",
            "522156",
            "522176",
            "4619",
            "4619",
            "21248",
            "4.9934847",
            "2.45",
            "0.21",
            "4.99",
        )
        self.assertEqual(full_result, full_expected)

        system_result = self.parser.parse(COPY_CMS_SYSTEM)
        system_expected = parsegc.generate_full_gc_entry(
            "10.377",
            "10.377",
            "CMS",
            "0",
            "340",
            "63872",
            "0.0161677",
            "1735",
            "340",
            "83008",
            "4614",
            "4612",
            "21248",
            "0.0162435",
            "0.02",
            "0.00",
            "0.02",
            "System",
        )
        self.assertEqual(system_result, system_expected)

    def test_young_ps_old_ps(self):
        """Young ParallelScavenge with old ParallelScavenge MarkSweep

        -XX:+UseParallelGC -XX:+UseParallelOldGC, turn adaptive sizing on/off with -XX:+UseAdaptiveSizePolicy
        """
        yg_result = self.parser.parse(PARALLEL_MARKSWEEP_ADAPTIVE_YG1)
        yg_expected = parsegc.generate_yg_gc_entry(
            "4.607",
            None,
            "PSYoungGen",
            "83708",
            "58240",
            "116480",
            None,
            "351227",
            "351398",
            "466048",
            "0.2748461",
            "0.93",
            "0.04",
            "0.27",
        )
        self.assertEqual(yg_result, yg_expected)

        yg_result2 = self.parser.parse(PARALLEL_MARKSWEEP_NON_ADAPTIVE_YG1)
        yg_expected2 = parsegc.generate_yg_gc_entry(
            "0.285",
            None,
            "PSYoungGen",
            "16448",
            "2688",
            "19136",
            None,
            "55510",
            "54822",
            "71296",
            "0.0370065",
            "0.13",
            "0.01",
            "0.03",
        )
        self.assertEqual(yg_result2, yg_expected2)

        full_result = self.parser.parse(PARALLEL_MARKSWEEP_ADAPTIVE_FULL)
        full_expected = parsegc.generate_full_gc_entry(
            "5.257",
            None,
            "ParOldGen",
            "349566",
            "349567",
            "349568",
            None,
            "466046",
            "407805",
            "466048",
            "4574",
            "4574",
            "21248",
            "1.8929788",
            "6.03",
            "0.17",
            "1.89",
        )
        # [PSYoungGen: 116480K->58237K(116480K)]
        self.assertEqual(full_result, full_expected)

        full_result2 = self.parser.parse(PARALLEL_MARKSWEEP_NON_ADAPTIVE_FULL)
        full_expected2 = parsegc.generate_full_gc_entry(
            "0.322",
            None,
            "ParOldGen",
            "52134",
            "52156",
            "52160",
            None,
            "54822",
            "54738",
            "71296",
            "4572",
            "4570",
            "21248",
            "0.1916334",
            "0.54",
            "0.02",
            "0.20",
        )
        # [PSYoungGen: 2688K->2581K(19136K)]
        self.assertEqual(full_result2, full_expected2)

        system_result = self.parser.parse(PARALLEL_MARKSWEEP_ADAPTIVE_SYSTEM)
        system_expected = parsegc.generate_full_gc_entry(
            "10.295",
            None,
            "ParOldGen",
            "0",
            "336",
            "43712",
            None,
            "448",
            "336",
            "62848",
            "4612",
            "4611",
            "21248",
            "0.0106136",
            "0.02",
            "0.00",
            "0.02",
            "System",
        )
        # [PSYoungGen: 448K->0K(19136K)]
        self.assertEqual(system_result, system_expected)

        system_result2 = self.parser.parse(PARALLEL_MARKSWEEP_NON_ADAPTIVE_SYSTEM)
        system_expected2 = parsegc.generate_full_gc_entry(
            "10.207",
            None,
            "ParOldGen",
            "0",
            "336",
            "43712",
            None,
            "448",
            "336",
            "62848",
            "4612",
            "4611",
            "21248",
            "0.0090976",
            "0.01",
            "0.00",
            "0.01",
            "System",
        )
        # [PSYoungGen: 448K->0K(19136K)]
        self.assertEqual(system_result2, system_expected2)

    def test_invalid_entry(self):
        result = self.parser.parse(CMS_INITIAL_MARK)
        self.assertIsNone(result)
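
The SERIAL_*, PARALLEL_*, PARNEW_* and COPY_* constants fed to parse() are defined elsewhere in parsegc_test.py and are not reproduced here. For orientation, a serial-collector log line consistent with the expected values in test_yg_copy_old_msc_parse would look roughly like the reconstruction below; the actual constant in the test file may differ:

# Reconstructed for illustration from the expected values above; the real
# SERIAL_ENTRY1 constant in parsegc_test.py may be formatted differently.
SERIAL_ENTRY1 = (
    "47.100: [GC 47.100: [DefNew: 25472K->1143K(25472K), 0.0103151 secs] "
    "66774K->45257K(81968K), 0.0103716 secs] "
    "[Times: user=0.01 sys=0.00, real=0.01 secs]"
)
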
Code Example #9
class ParseGCBlobTest(unittest.TestCase):

    path = os.path.dirname(os.path.abspath(__file__)) + "/"
    blobstore_dir = path + "testbed.blobstore"
    sample_file = path + "gc-sample.log"

    def setUp(self):
        # First, create an instance of the Testbed class.
        self.testbed = TestbedWithFiles()
        # Then activate the testbed, which prepares the service stubs for use.
        self.testbed.activate()
        # Next, declare which service stubs you want to use.
        self.testbed.init_blobstore_stub(self.blobstore_dir)

        self.parser = ParseGCLog()
        self.csv_writer = BlobResultWriter()

    def tearDown(self):
        self.testbed.deactivate()
        if os.path.isdir(self.blobstore_dir):
            shutil.rmtree(self.blobstore_dir)

    def _load_file_data(self, filename):
        file = open(filename, 'r')
        data = file.read()
        file.close()
        return data

    def _load_blob_data(self, blob_key):
        blob_info = blobstore.BlobInfo.get(blob_key)
        blob_reader = blob_info.open()
        data = blob_reader.read()
        blob_reader.close()
        return data

    def test_generate_memory_csv(self):
        expected_data = self._load_file_data(self.path + "expected_mem.csv")
        gc_data = self.parser.parse_file(self.sample_file)
        log_key = LogData(filename="memory.tmp", notes="notes").put()
        blob_key = graph.generate_cached_graph(log_key, graph.YG_GC_MEMORY,
                                               gc_data, self.csv_writer)
        result_data = self._load_blob_data(blob_key)
        self.assertEqual(result_data, expected_data)

    def test_generate_gc_reclaimed_csv(self):
        expected_data = self._load_file_data(self.path +
                                             "expected_reclaimed.csv")
        gc_data = self.parser.parse_file(self.sample_file)
        log_key = LogData(filename="reclaimed.tmp", notes="notes").put()
        blob_key = graph.generate_cached_graph(log_key, graph.MEMORY_RECLAIMED,
                                               gc_data, self.csv_writer)
        result_data = self._load_blob_data(blob_key)
        self.assertEqual(result_data, expected_data)

    def test_generate_gc_duration_csv(self):
        expected_data = self._load_file_data(self.path +
                                             "expected_duration.csv")
        gc_data = self.parser.parse_file(self.sample_file)
        log_key = LogData(filename="duration.tmp", notes="notes").put()
        blob_key = graph.generate_cached_graph(log_key, graph.GC_DURATION,
                                               gc_data, self.csv_writer)
        result_data = self._load_blob_data(blob_key)
        self.assertEqual(result_data, expected_data)

    def test_gc_datastore(self):
        """Test storage & retrival of GC entries in datastore"""
        yg_gc = parsegc.generate_yg_gc_entry("100.25", "100.25", "ParNew", "3",
                                             "2", "8", "0.12345", "8192",
                                             "5120", "16384", "3.12345", "1.5",
                                             "2.0", "3.1")

        full_gc = parsegc.generate_full_gc_entry("200.5", "200.5", "Tenured",
                                                 "20", "10", "40", "0.23456",
                                                 "8192", "5120", "16384",
                                                 "200", "100", "400", "3.1234",
                                                 "1.9", "0.05", "3.11")

        # Items should be written to & returned from the store
        # ordered by timestamp
        gc_data = [yg_gc, full_gc]

        log_key = LogData(filename="test_gc_datastore.tmp",
                          notes="notes").put()

        gc_datastore.store_data(log_key, gc_data)

        results = gc_datastore.get_data(log_key)

        self.assertEqual(results, gc_data)

        # Uncomment below to help debug failures
        """