Code example #1
    def test_write_run_info(self):
        with temporary_file_path() as tmppath:
            ri = RunInfo(tmppath)
            ri.add_info('key1', 'val1')
            # Keys and values are stripped of surrounding whitespace when added.
            ri.add_infos(('key2', ' val2'), (' key3 ', 'val3 '))
            self.assertEqual({
                'key1': 'val1',
                'key2': 'val2',
                'key3': 'val3'
            }, ri.get_as_dict())

            # The backing file holds one 'key: value' entry per line.
            with open(tmppath, 'r') as tmpfile:
                contents = tmpfile.read()
            self.assertEqual('key1: val1\nkey2: val2\nkey3: val3\n', contents)
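
This test (and the read test that follows) gets its scratch file from temporary_file_path, a context manager, presumably from the project's common test utilities, that yields the path of a temp file and removes it when the block exits. To run these excerpts outside the source tree, an equivalent stand-in (an assumption, not the project's actual helper) could look like this:

import contextlib
import os
import tempfile


@contextlib.contextmanager
def temporary_file_path():
    """Yield the path of an empty temp file and delete it when the block exits."""
    fd, path = tempfile.mkstemp()
    os.close(fd)  # The tests open the path themselves, so release our handle.
    try:
        yield path
    finally:
        if os.path.exists(path):
            os.unlink(path)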
Code example #2
    def test_run_info_read(self):
        with temporary_file_path() as tmppath:
            # Existing entries are parsed on load; keys and values are stripped.
            with open(tmppath, 'w') as tmpfile:
                tmpfile.write('foo:bar\n baz :qux quux')
            ri = RunInfo(tmppath)
            self.assertEqual(ri.path(), tmppath)

            # Test get_info access.
            self.assertEqual(ri.get_info('foo'), 'bar')
            self.assertEqual(ri.get_info('baz'), 'qux quux')
            self.assertIsNone(ri.get_info('nonexistent'))

            # Test dict-like access.
            self.assertEqual(ri['foo'], 'bar')
            self.assertEqual(ri['baz'], 'qux quux')
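
Taken together, the two tests pin down the RunInfo behaviour they exercise: the constructor takes a file path and loads any existing 'key: value' lines, keys and values are stripped of surrounding whitespace, add_info/add_infos persist entries one per line, get_info returns None for missing keys, and entries are also readable via get_as_dict and dict-style indexing. The class below is a minimal sketch that satisfies those tests; it is not the project's actual implementation.

import os


class RunInfo(object):
    """Minimal sketch inferred from the tests above; not the real class."""

    def __init__(self, info_file):
        self._info_file = info_file
        self._info = {}
        # Load any existing 'key: value' lines, stripping whitespace around both parts.
        if os.path.exists(self._info_file):
            with open(self._info_file, 'r') as f:
                for line in f:
                    if ':' in line:
                        key, value = line.split(':', 1)
                        self._info[key.strip()] = value.strip()

    def path(self):
        return self._info_file

    def get_info(self, key):
        return self._info.get(key)

    def __getitem__(self, key):
        return self._info[key]

    def get_as_dict(self):
        return dict(self._info)

    def add_info(self, key, val):
        self.add_infos((key, val))

    def add_infos(self, *keyvals):
        # Strip whitespace, remember the entries, and append them to the file.
        with open(self._info_file, 'a') as f:
            for key, val in keyvals:
                key, val = key.strip(), str(val).strip()
                self._info[key] = val
                f.write('%s: %s\n' % (key, val))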
Code example #3
    def __init__(self, config):
        self.run_timestamp = time.time()  # A double, so we get subsecond precision for ids.
        cmd_line = ' '.join(['./pants'] + sys.argv[1:])

        # run_id is safe for use in paths.
        millis = (self.run_timestamp * 1000) % 1000
        run_id = 'pants_run_%s_%d' % (
            time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(self.run_timestamp)), millis)

        self.info_dir = os.path.join(config.getdefault('info_dir'), run_id)
        self.run_info = RunInfo(os.path.join(self.info_dir, 'info'))
        self.run_info.add_basic_info(run_id, self.run_timestamp)
        self.run_info.add_info('cmd_line', cmd_line)
        self.stats_url = config.getdefault('stats_upload_url', default=None)

        # Create the 'latest' symlink after the add_info calls above, so we're
        # guaranteed that the info file exists.
        link_to_latest = os.path.join(os.path.dirname(self.info_dir), 'latest')
        if os.path.exists(link_to_latest):
            os.unlink(link_to_latest)
        os.symlink(self.info_dir, link_to_latest)

        # Time spent in a workunit, including its children.
        self.cumulative_timings = AggregatedTimings(
            os.path.join(self.info_dir, 'cumulative_timings'))

        # Time spent in a workunit, not including its children.
        self.self_timings = AggregatedTimings(
            os.path.join(self.info_dir, 'self_timings'))

        # Hit/miss stats for the artifact cache.
        self.artifact_cache_stats = ArtifactCacheStats(
            os.path.join(self.info_dir, 'artifact_cache_stats'))

        # We report to this Report.
        self.report = None

        # The workunit representing the entire pants run.
        self.root_workunit = None

        # The workunit we're currently executing.
        # TODO: What does this mean when executing multiple workunits in parallel?
        self._current_workunit = None
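
The run_id construction in the constructor above is the detail worth spelling out: strftime with underscores keeps the id safe to use as a directory name, and (run_timestamp * 1000) % 1000 recovers the sub-second remainder in milliseconds, which is what gives ids their sub-second precision. A standalone version of just that logic (illustrative only; the output will differ on your machine):

import time

run_timestamp = time.time()  # Float seconds since the epoch, with sub-second precision.

# Milliseconds within the current second (0-999); the %d format truncates the float.
millis = (run_timestamp * 1000) % 1000
run_id = 'pants_run_%s_%d' % (
    time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(run_timestamp)), millis)

print(run_id)  # Something like: pants_run_2014_01_15_10_30_59_123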
Code example #4
File: run_tracker.py Project: testvidya11/commons
    def __init__(self,
                 info_dir,
                 stats_upload_url=None,
                 num_foreground_workers=8,
                 num_background_workers=8):
        self.run_timestamp = time.time()  # A double, so we get subsecond precision for ids.
        cmd_line = ' '.join(['./pants'] + sys.argv[1:])

        # run_id is safe for use in paths.
        millis = (self.run_timestamp * 1000) % 1000
        run_id = 'pants_run_%s_%d' % (
            time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime(self.run_timestamp)), millis)

        self.info_dir = os.path.join(info_dir, run_id)
        self.run_info = RunInfo(os.path.join(self.info_dir, 'info'))
        self.run_info.add_basic_info(run_id, self.run_timestamp)
        self.run_info.add_info('cmd_line', cmd_line)
        self.stats_url = stats_upload_url

        # Create the 'latest' symlink after the add_info calls above, so we're
        # guaranteed that the info file exists.
        link_to_latest = os.path.join(os.path.dirname(self.info_dir), 'latest')
        if os.path.exists(link_to_latest):
            os.unlink(link_to_latest)
        os.symlink(self.info_dir, link_to_latest)

        # Time spent in a workunit, including its children.
        self.cumulative_timings = AggregatedTimings(
            os.path.join(self.info_dir, 'cumulative_timings'))

        # Time spent in a workunit, not including its children.
        self.self_timings = AggregatedTimings(
            os.path.join(self.info_dir, 'self_timings'))

        # Hit/miss stats for the artifact cache.
        self.artifact_cache_stats = ArtifactCacheStats(
            os.path.join(self.info_dir, 'artifact_cache_stats'))

        # Number of threads for foreground work.
        self._num_foreground_workers = num_foreground_workers

        # Number of threads for background work.
        self._num_background_workers = num_background_workers

        # We report to this Report.
        self.report = None

        # self._threadlocal.current_workunit contains the current workunit for the calling thread.
        # Note that multiple threads may share a name (e.g., all the threads in a pool).
        self._threadlocal = threading.local()

        # For main thread work. Created on start().
        self._main_root_workunit = None

        # For concurrent foreground work.  Created lazily if needed.
        # Associated with the main thread's root workunit.
        self._foreground_worker_pool = None

        # For background work.  Created lazily if needed.
        self._background_worker_pool = None
        self._background_root_workunit = None

        self._aborted = False
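
Assuming the enclosing class in run_tracker.py is constructed as above, every run gets its own timestamped directory under the info_dir argument (the constructor then stores that per-run directory as self.info_dir), holding the files written by RunInfo, AggregatedTimings and ArtifactCacheStats, while the 'latest' symlink in the parent directory is re-pointed at the newest run. A short snippet reproducing just the path layout (the concrete paths and run_id are made up for illustration):

import os

info_dir = '/tmp/pants-runs'                   # Hypothetical base directory argument.
run_id = 'pants_run_2014_01_15_10_30_59_123'   # Shape produced by the constructor above.
run_dir = os.path.join(info_dir, run_id)       # What the constructor stores as self.info_dir.

# Per-run files the constructor sets up writers for.
for name in ('info', 'cumulative_timings', 'self_timings', 'artifact_cache_stats'):
    print(os.path.join(run_dir, name))

# 'latest' sits next to the per-run directories and is replaced on every run.
print('%s -> %s' % (os.path.join(info_dir, 'latest'), run_dir))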