Example #1
    def on_benchmark_stop(self):
        logger.info("Analyzing merge times.")
        # first decompress all logs. They have unique names so it's safe to do that. It's easier to first decompress everything
        for log_file in os.listdir(self.node_log_dir):
            log_path = "%s/%s" % (self.node_log_dir, log_file)
            if io.is_archive(log_path):
                logger.info("Decompressing [%s] to analyze merge times..." %
                            log_path)
                io.decompress(log_path, self.node_log_dir)

        # we need to add up times from all files
        merge_times = {}
        for log_file in os.listdir(self.node_log_dir):
            log_path = "%s/%s" % (self.node_log_dir, log_file)
            if not io.is_archive(log_file):
                logger.debug("Analyzing merge times in [%s]" % log_path)
                with open(log_path, mode="rt", encoding="utf-8") as f:
                    self._extract_merge_times(f, merge_times)
            else:
                logger.debug("Skipping archived logs in [%s]." % log_path)
        if merge_times:
            self._store_merge_times(merge_times)
        logger.info(
            "Finished analyzing merge times. Extracted [%s] different merge time components."
            % len(merge_times))
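In Example #1, _extract_merge_times is called once per decompressed log file and accumulates results into a shared merge_times dict, which _store_merge_times then persists. Below is a minimal, hypothetical sketch of that accumulation pattern; the extract_merge_times name, the MERGE_LINE pattern, and the assumed log line format are illustrations only, not Rally's actual parsing code.

    import re

    # Hypothetical log line format such as "... merge [postings] took [123] ms";
    # the real merge log format is not shown in this example (assumption).
    MERGE_LINE = re.compile(r"merge \[(?P<part>[^\]]+)\] took \[(?P<ms>\d+)\] ms")

    def extract_merge_times(log_lines, merge_times):
        # Sum the time spent per merge component across the supplied log lines,
        # accumulating into the shared merge_times dict, mirroring how Example #1
        # feeds every plain-text log file into the same dictionary.
        for line in log_lines:
            match = MERGE_LINE.search(line)
            if match:
                part = match.group("part")
                merge_times[part] = merge_times.get(part, 0) + int(match.group("ms"))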
Example #2
File: io_test.py Project: yodasantu/rally
    def test_archive(self):
        self.assertTrue(io.is_archive("/tmp/some-archive.tar.gz"))
        self.assertTrue(io.is_archive("/tmp/some-archive.tgz"))
        # Rally does not recognize .7z
        self.assertFalse(io.is_archive("/tmp/some-archive.7z"))
        self.assertFalse(io.is_archive("/tmp/some.log"))
        self.assertFalse(io.is_archive("some.log"))
Example #3
    def test_archive(self):
        assert io.is_archive("/tmp/some-archive.tar.gz")
        assert io.is_archive("/tmp/some-archive.tgz")
        # Rally does not recognize .7z
        assert not io.is_archive("/tmp/some-archive.7z")
        assert not io.is_archive("/tmp/some.log")
        assert not io.is_archive("some.log")
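Examples #2 and #3 pin down the expected behavior of io.is_archive (presumably from esrally.utils.io): .tar.gz and .tgz count as archives, while .7z and .log do not, regardless of the directory part. The snippet below is a minimal sketch of an extension-based check that satisfies those assertions; the ARCHIVE_EXTENSIONS tuple is an assumption, and the real Rally helper may accept additional formats.

    # Extensions treated as archives in this sketch; chosen so the assertions above hold
    # (.tar.gz and .tgz are archives, .7z and .log are not). The real helper may
    # recognize further formats such as .zip or .bz2 (assumption).
    ARCHIVE_EXTENSIONS = (".tar.gz", ".tgz", ".tar", ".zip", ".gz", ".bz2")

    def is_archive(name):
        # Pure suffix check on the lower-cased name; the directory prefix is irrelevant,
        # which is why "/tmp/some.log" and "some.log" are rejected alike.
        return name.lower().endswith(ARCHIVE_EXTENSIONS)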
Example #4
File: loader.py Project: sen0120/rally
    def _create_type(self, type_spec, mapping_dir):
        docs = self._r(type_spec, "documents", mandatory=False)
        if docs:
            if io.is_archive(docs):
                document_archive = docs
                document_file = io.splitext(docs)[0]
            else:
                document_archive = None
                document_file = docs
            number_of_documents = self._r(type_spec, "document-count")
            compressed_bytes = self._r(type_spec, "compressed-bytes", mandatory=False)
            uncompressed_bytes = self._r(type_spec, "uncompressed-bytes", mandatory=False)
        else:
            document_archive = None
            document_file = None
            number_of_documents = 0
            compressed_bytes = 0
            uncompressed_bytes = 0

        mapping_file = os.path.join(mapping_dir, self._r(type_spec, "mapping"))
        with self.source(mapping_file, "rt") as f:
            mapping = json.load(f)

        return track.Type(name=self._r(type_spec, "name"),
                          mapping=mapping,
                          document_file=document_file,
                          document_archive=document_archive,
                          includes_action_and_meta_data=self._r(type_spec, "includes-action-and-meta-data", mandatory=False,
                                                                default_value=False),
                          number_of_documents=number_of_documents,
                          compressed_size_in_bytes=compressed_bytes,
                          uncompressed_size_in_bytes=uncompressed_bytes)
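Example #4 reads a set of well-known keys from type_spec via self._r(...). The dictionary below is a hypothetical specification that _create_type could consume; the key names are taken directly from the code above, while the file names and numeric values are illustrative only.

    # Hypothetical type specification for _create_type; keys mirror the _r() lookups
    # above, values are made up for illustration.
    type_spec = {
        "name": "docs",
        "mapping": "mappings.json",          # joined with mapping_dir and loaded as JSON
        "documents": "documents.json.bz2",   # if io.is_archive() accepts .bz2, document_file
                                             # becomes "documents.json" via io.splitext()
        "document-count": 1000000,
        "compressed-bytes": 100000,          # optional (mandatory=False)
        "uncompressed-bytes": 500000,        # optional (mandatory=False)
        "includes-action-and-meta-data": False,  # optional, defaults to False
    }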