Example 1
0
    def testGetFilesInDirectoryIfChanged(self):
        """GetFilesInDirectoryIfChanged rejects bad paths, fetches per .sha1.

        Uses a stubbed os module: only 'real_dir_path' exists, and its fake
        subtree contains exactly three .sha1 hash files.
        """
        stubs = system_stub.Override(cloud_storage, ['os'])
        stubs.os._directory = {
            'dir1': ['1file1.sha1', '1file2.txt', '1file3.sha1'],
            'dir2': ['2file.txt'],
            'dir3': ['3file1.sha1']
        }
        stubs.os.path.dirs = ['real_dir_path']

        # Record every call made to the patched GetIfChanged.
        recorded_calls = []

        def RecordingGetIfChanged(*args):
            recorded_calls.append(args)

        saved_get_if_changed = cloud_storage.GetIfChanged
        cloud_storage.GetIfChanged = RecordingGetIfChanged
        try:
            # The filesystem root is rejected and triggers no downloads.
            self.assertRaises(ValueError,
                              cloud_storage.GetFilesInDirectoryIfChanged,
                              os.path.abspath(os.sep),
                              cloud_storage.PUBLIC_BUCKET)
            self.assertEqual(0, len(recorded_calls))
            # A nonexistent directory is likewise rejected with no downloads.
            self.assertRaises(ValueError,
                              cloud_storage.GetFilesInDirectoryIfChanged,
                              'fake_dir_path', cloud_storage.PUBLIC_BUCKET)
            self.assertEqual(0, len(recorded_calls))
            # A real directory downloads once per .sha1 file in the tree.
            cloud_storage.GetFilesInDirectoryIfChanged(
                'real_dir_path', cloud_storage.PUBLIC_BUCKET)
            self.assertEqual(3, len(recorded_calls))
        finally:
            cloud_storage.GetIfChanged = saved_get_if_changed
            stubs.Restore()
Example 2
0
    def testGetFilesInDirectoryIfChanged(self):
        """GetFilesInDirectoryIfChanged rejects bad paths, fetches per .sha1.

        Variant using a fake filesystem: 'real_dir_path' is populated with
        exactly three .sha1 hash files across its subdirectories.
        """
        self.CreateFiles([
            'real_dir_path/dir1/1file1.sha1', 'real_dir_path/dir1/1file2.txt',
            'real_dir_path/dir1/1file3.sha1', 'real_dir_path/dir2/2file.txt',
            'real_dir_path/dir3/3file1.sha1'
        ])

        # Record every call made to the patched GetIfChanged.
        recorded_calls = []

        def RecordingGetIfChanged(*args):
            recorded_calls.append(args)

        saved_get_if_changed = cloud_storage.GetIfChanged
        cloud_storage.GetIfChanged = RecordingGetIfChanged
        try:
            # The filesystem root is rejected and triggers no downloads.
            self.assertRaises(ValueError,
                              cloud_storage.GetFilesInDirectoryIfChanged,
                              os.path.abspath(os.sep),
                              cloud_storage.PUBLIC_BUCKET)
            self.assertEqual(0, len(recorded_calls))
            # A nonexistent directory is likewise rejected with no downloads.
            self.assertRaises(ValueError,
                              cloud_storage.GetFilesInDirectoryIfChanged,
                              'fake_dir_path', cloud_storage.PUBLIC_BUCKET)
            self.assertEqual(0, len(recorded_calls))
            # A real directory downloads once per .sha1 file in the tree.
            cloud_storage.GetFilesInDirectoryIfChanged(
                'real_dir_path', cloud_storage.PUBLIC_BUCKET)
            self.assertEqual(3, len(recorded_calls))
        finally:
            cloud_storage.GetIfChanged = saved_get_if_changed
Example 3
0
def _FetchDependenciesIfNeeded(story_set):
    """Download files needed by a user story set."""
    # Sync every serving directory's contents from the set's cloud bucket.
    for directory in story_set.serving_dirs:
        cloud_storage.GetFilesInDirectoryIfChanged(directory, story_set.bucket)

    # WPR archives are only needed when at least one story is not local.
    if not all(story.is_local for story in story_set):
        story_set.wpr_archive_info.DownloadArchivesIfNeeded()
Example 4
0
 def testDisableCloudStorageIo(self, unused_lock_mock):
     """With DISABLE_CLOUD_STORAGE_IO set, every cloud-storage I/O entry
     point raises CloudStorageIODisabled."""
     os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
     dir_path = 'real_dir_path'
     self.fs.CreateDirectory(dir_path)
     file_path = os.path.join(dir_path, 'file1')
     file_path_sha = file_path + '.sha1'
     self.CreateFiles([file_path, file_path_sha])
     with open(file_path_sha, 'w') as f:
         f.write('hash1234')
     # Each entry is (callable, args); all must refuse to perform I/O.
     blocked_calls = [
         (cloud_storage.Copy,
          ('bucket1', 'bucket2', 'remote_path1', 'remote_path2')),
         (cloud_storage.Get, ('bucket', 'foo', file_path)),
         (cloud_storage.GetIfChanged, (file_path, 'foo')),
         (cloud_storage.GetIfHashChanged,
          ('bar', file_path, 'bucket', 'hash1234')),
         (cloud_storage.Insert, ('bucket', 'foo', file_path)),
         (cloud_storage.GetFilesInDirectoryIfChanged, (dir_path, 'bucket')),
     ]
     for io_fn, args in blocked_calls:
         with self.assertRaises(cloud_storage.CloudStorageIODisabled):
             io_fn(*args)
Example 5
0
def Run(test, story_set, finder_options, results, max_failures=None):
  """Runs a given test against a given page_set with the given options.

  Stop execution for unexpected exceptions such as KeyboardInterrupt.
  We "white list" certain exceptions for which the story runner
  can continue running the remaining stories.

  Args:
    test: Test object handed to each shared state and to the per-story run.
    story_set: Iterable of stories; also provides bucket, serving_dirs,
      archive_data_file, wpr_archive_info and allow_mixed_story_states.
    finder_options: Carries use_live_sites, browser_options.wpr_mode,
      pageset_repeat, page_repeat and max_failures.
    results: Notified via WillRunPage/DidRunPage; exposes failures.
    max_failures: Optional failure cap; finder_options.max_failures takes
      precedence when set.
  """
  # Filter page set based on options.
  # NOTE(review): this file targets Python 2 (xrange below), where filter()
  # returns a list; under Python 3 the lazy filter object would break the
  # truthiness/len checks that follow — confirm before porting.
  stories = filter(story_module.StoryFilter.IsSelected, story_set)

  # Fetch serving dirs and WPR archives unless running live or recording.
  if (not finder_options.use_live_sites and story_set.bucket and
      finder_options.browser_options.wpr_mode != wpr_modes.WPR_RECORD):
    serving_dirs = story_set.serving_dirs
    for directory in serving_dirs:
      cloud_storage.GetFilesInDirectoryIfChanged(directory,
                                                 story_set.bucket)
    # Abort the whole run if the archives cannot be validated.
    if not _UpdateAndCheckArchives(
        story_set.archive_data_file, story_set.wpr_archive_info,
        stories):
      return

  if not stories:
    return

  # Effective max failures gives priority to command-line flag value.
  effective_max_failures = finder_options.max_failures
  if effective_max_failures is None:
    effective_max_failures = max_failures

  story_groups = StoriesGroupedByStateClass(
      stories,
      story_set.allow_mixed_story_states)

  for group in story_groups:
    # One shared state instance per group, created lazily and rebuilt after
    # a Telemetry error tears it down.
    state = None
    try:
      for _ in xrange(finder_options.pageset_repeat):
        for story in group.stories:
          for _ in xrange(finder_options.page_repeat):
            if not state:
              state = group.shared_state_class(
                  test, finder_options, story_set)
            results.WillRunPage(story)
            try:
              _WaitForThermalThrottlingIfNeeded(state.platform)
              _RunStoryAndProcessErrorIfNeeded(story, results, state, test)
            except exceptions.Error:
              # Catch all Telemetry errors to give the story a chance to retry.
              # The retry is enabled by tearing down the state and creating
              # a new state instance in the next iteration.
              try:
                # If TearDownState raises, do not catch the exception.
                # (The Error was saved as a failure value.)
                state.TearDownState()
              finally:
                # Later finally-blocks use state, so ensure it is cleared.
                state = None
            finally:
              # Only propagate a result-processing exception when it would
              # not mask an exception already in flight.
              has_existing_exception = sys.exc_info() != (None, None, None)
              try:
                if state:
                  _CheckThermalThrottling(state.platform)
                results.DidRunPage(story)
              except Exception:
                if not has_existing_exception:
                  raise
                # Print current exception and propagate existing exception.
                exception_formatter.PrintFormattedException(
                    msg='Exception from result processing:')
          # Stop early once the failure budget is exhausted.
          if (effective_max_failures is not None and
              len(results.failures) > effective_max_failures):
            logging.error('Too many failures. Aborting.')
            return
    finally:
      # Tear down whatever state survived, again without masking an
      # exception already propagating out of the loops above.
      if state:
        has_existing_exception = sys.exc_info() != (None, None, None)
        try:
          state.TearDownState()
        except Exception:
          if not has_existing_exception:
            raise
          # Print current exception and propagate existing exception.
          exception_formatter.PrintFormattedException(
              msg='Exception from TearDownState:')