def _InstallIpfw(self):
        ipfw_bin = binary_manager.FindPath('ipfw', self.GetArchName(),
                                           self.GetOSName())
        ipfw_mod = binary_manager.FindPath('ipfw_mod.ko', self.GetArchName(),
                                           self.GetOSName())

        try:
            changed = cloud_storage.GetIfChanged(ipfw_bin,
                                                 cloud_storage.INTERNAL_BUCKET)
            changed |= cloud_storage.GetIfChanged(
                ipfw_mod, cloud_storage.INTERNAL_BUCKET)
        except cloud_storage.CloudStorageError as e:
            logging.error(str(e))
            logging.error('You may proceed by manually building and installing '
                          'dummynet for your kernel. See: '
                          'http://info.iet.unipi.it/~luigi/dummynet/')
            sys.exit(1)

        if changed or not self.CanLaunchApplication('ipfw'):
            if not self._IsIpfwKernelModuleInstalled():
                subprocess.check_call(['/usr/bin/sudo', 'insmod', ipfw_mod])
            os.chmod(ipfw_bin, 0o755)
            subprocess.check_call(
                ['/usr/bin/sudo', 'cp', ipfw_bin, '/usr/local/sbin'])

        assert self.CanLaunchApplication('ipfw'), 'Failed to install ipfw. ' \
            'The provided ipfw binaries are not supported for Linux kernels ' \
            '< 3.13. You may proceed by manually building and installing ' \
            'dummynet for your kernel. See: ' \
            'http://info.iet.unipi.it/~luigi/dummynet/'
Example #2
    def testNoHashComputationNeededUponSecondCall(self, mock_get_locked,
                                                  mock_calculate_hash,
                                                  unused_get_locked):
        mock_calculate_hash.side_effect = _FakeCalulateHashNewHash
        cloud_storage.ReadHash = _FakeReadHash
        file_path = 'test-file-path.wpr'
        hash_path = file_path + '.sha1'

        def _FakeGetLocked(bucket, expected_hash, file_path):
            del bucket, expected_hash, file_path  # unused
            cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

        mock_get_locked.side_effect = _FakeGetLocked

        self.CreateFiles([file_path, hash_path])
        # hash_path and file_path exist, and have different hashes. This first call
        # will invoke a fetch.
        self.assertTrue(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))

        # The fetch left a .fetchts file on the machine.
        self.assertTrue(os.path.exists(file_path + '.fetchts'))

        # Subsequent invocations of GetIfChanged should not invoke CalculateHash.
        mock_calculate_hash.assert_not_called()
        self.assertFalse(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
        self.assertFalse(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
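The _Fake* hash helpers used throughout these tests are not shown in this
section. A plausible minimal version, assuming they only need to return fixed,
distinguishable digests (the misspelled 'Calulate' matches the identifiers the
tests actually use):

def _FakeReadHash(_):
    # Pretend every .sha1 file names this digest.
    return 'hash1234'

def _FakeCalulateHashMatchesRead(_):
    # The local file hashes to the same digest, so GetIfChanged skips the fetch.
    return 'hash1234'

def _FakeCalulateHashNewHash(_):
    # The local file hashes to a different digest, so GetIfChanged fetches.
    return 'hashNEW'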
Example #3
 def testGetIfChanged(self, unused_lock_mock):
     orig_get = cloud_storage._GetLocked
     orig_read_hash = cloud_storage.ReadHash
     orig_calculate_hash = cloud_storage.CalculateHash
     cloud_storage.ReadHash = _FakeReadHash
     cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
     file_path = 'test-file-path.wpr'
     hash_path = file_path + '.sha1'
     try:
         cloud_storage._GetLocked = self._FakeGet
         # hash_path doesn't exist.
         self.assertFalse(
             cloud_storage.GetIfChanged(file_path,
                                        cloud_storage.PUBLIC_BUCKET))
         # hash_path exists, but file_path doesn't.
         self.CreateFiles([hash_path])
         self.assertTrue(
             cloud_storage.GetIfChanged(file_path,
                                        cloud_storage.PUBLIC_BUCKET))
         # hash_path and file_path exist, and have same hash.
         self.CreateFiles([file_path])
         self.assertFalse(
             cloud_storage.GetIfChanged(file_path,
                                        cloud_storage.PUBLIC_BUCKET))
         # hash_path and file_path exist, and have different hashes.
         cloud_storage.CalculateHash = _FakeCalulateHashNewHash
         self.assertTrue(
             cloud_storage.GetIfChanged(file_path,
                                        cloud_storage.PUBLIC_BUCKET))
     finally:
         cloud_storage._GetLocked = orig_get
         cloud_storage.CalculateHash = orig_calculate_hash
         cloud_storage.ReadHash = orig_read_hash
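self.CreateFiles and self._FakeGet come from the test fixture and are not
shown. Assuming the suite runs on pyfakefs (as the self.fs calls elsewhere
suggest), they could look like:

import os

def CreateFiles(self, file_paths):
    # Create empty files on the fake filesystem.
    for f in file_paths:
        self.fs.CreateFile(f)

def _FakeGet(self, bucket, expected_hash, file_path):
    # Stand-in for cloud_storage._GetLocked: skip the network and simply
    # materialize the "downloaded" file on the fake filesystem.
    del bucket, expected_hash  # unused
    if not os.path.exists(file_path):
        self.fs.CreateFile(file_path)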
Example #4
    def testDisableCloudStorageIo(self, unused_lock_mock):
        os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
        dir_path = 'real_dir_path'
        self.fs.CreateDirectory(dir_path)
        file_path = os.path.join(dir_path, 'file1')
        file_path_sha = file_path + '.sha1'

        def CleanTimeStampFile():
            os.remove(file_path + '.fetchts')

        self.CreateFiles([file_path, file_path_sha])
        with open(file_path_sha, 'w') as f:
            f.write('hash1234')
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1',
                               'remote_path2')
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.Get('bucket', 'foo', file_path)
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.GetIfChanged(file_path, 'foo')
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.GetIfHashChanged('bar', file_path, 'bucket',
                                           'hash1234')
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.Insert('bucket', 'foo', file_path)

        CleanTimeStampFile()
        with self.assertRaises(cloud_storage.CloudStorageIODisabled):
            cloud_storage.GetFilesInDirectoryIfChanged(dir_path, 'bucket')
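The test expects every cloud_storage I/O entry point to raise
CloudStorageIODisabled when the environment variable is set. A sketch of such
a guard, assuming a module-level check runs at the top of each entry point
(the real module may structure this differently):

import os

class CloudStorageIODisabled(Exception):
    # Exception type exercised by the test above (defined in cloud_storage).
    pass

def _EnsureCloudStorageIoAllowed():
    # Hypothetical guard called at the top of Copy, Get, GetIfChanged,
    # GetIfHashChanged, Insert, and GetFilesInDirectoryIfChanged.
    if os.environ.get('DISABLE_CLOUD_STORAGE_IO') == '1':
        raise CloudStorageIODisabled(
            'Cloud storage I/O is disabled by DISABLE_CLOUD_STORAGE_IO=1')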
Example #5
    def __init__(self,
                 url,
                 page_set=None,
                 base_dir=None,
                 name='',
                 credentials_path=None,
                 credentials_bucket=cloud_storage.PUBLIC_BUCKET,
                 labels=None,
                 startup_url='',
                 make_javascript_deterministic=True,
                 shared_page_state_class=shared_page_state.SharedPageState,
                 grouping_keys=None,
                 cache_temperature=cache_temperature_module.ANY,
                 traffic_setting=traffic_setting_module.NONE):
        self._url = url

        super(Page, self).__init__(
            shared_page_state_class,
            name=name,
            labels=labels,
            is_local=self._scheme in ['file', 'chrome', 'about'],
            make_javascript_deterministic=make_javascript_deterministic,
            grouping_keys=grouping_keys)

        self._page_set = page_set
        # Default value of base_dir is the directory of the file that defines the
        # class of this page instance.
        if base_dir is None:
            base_dir = os.path.dirname(inspect.getfile(self.__class__))
        self._base_dir = base_dir
        self._name = name
        if credentials_path:
            credentials_path = os.path.join(self._base_dir, credentials_path)
            cloud_storage.GetIfChanged(credentials_path, credentials_bucket)
            if not os.path.exists(credentials_path):
                logging.error('Invalid credentials path: %s', credentials_path)
                credentials_path = None
        self._credentials_path = credentials_path
        self._cache_temperature = cache_temperature
        if cache_temperature != cache_temperature_module.ANY:
            self.grouping_keys['cache_temperature'] = cache_temperature
        if traffic_setting != traffic_setting_module.NONE:
            self.grouping_keys['traffic_setting'] = traffic_setting

        assert traffic_setting in traffic_setting_module.NETWORK_CONFIGS, (
            'Invalid traffic setting: %s' % traffic_setting)
        self._traffic_setting = traffic_setting

        # Whether to collect garbage on the page before navigating & performing
        # page actions.
        self._collect_garbage_before_run = True

        # These attributes can be set dynamically by the page.
        self.synthetic_delays = dict()
        self._startup_url = startup_url
        self.credentials = None
        self.skip_waits = False
        self.script_to_evaluate_on_commit = None
        self._SchemeErrorCheck()
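A hedged usage sketch for this constructor; the story set variable and the
cache/traffic constants below are illustrative assumptions:

page = Page(
    'http://example.com',
    page_set=my_story_set,  # assumed to exist
    name='example_story',
    cache_temperature=cache_temperature_module.COLD,    # assumed constant
    traffic_setting=traffic_setting_module.REGULAR_4G)  # assumed constant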
Example #6
    def _DownloadPregeneratedProfileArchive(self):
        """Download and extract the profile directory archive if one exists.

    On success, updates self._finder_options.browser_options.profile_dir with
    the directory of the extracted profile.
    """
        try:
            cloud_storage.GetIfChanged(self._pregenerated_profile_archive_dir,
                                       cloud_storage.PUBLIC_BUCKET)
        except (cloud_storage.CredentialsError,
                cloud_storage.PermissionError) as e:
            if os.path.exists(self._pregenerated_profile_archive_dir):
                # If the profile directory archive exists, assume the user has
                # their own local copy and simply warn.
                logging.warning('Could not download Profile archive: %s',
                                self._pregenerated_profile_archive_dir)
            else:
                # If the profile directory archive doesn't exist, this is fatal.
                logging.error(
                    'Cannot run without the required profile archive: %s. '
                    'If you believe you have credentials, follow the '
                    'instructions below.',
                    self._pregenerated_profile_archive_dir)
                logging.error(str(e))
                sys.exit(-1)

        # Check to make sure the zip file exists.
        if not os.path.isfile(self._pregenerated_profile_archive_dir):
            raise Exception("Profile directory archive not downloaded: ",
                            self._pregenerated_profile_archive_dir)

        # The location to extract the profile into.
        self._unzipped_profile = tempfile.mkdtemp()
        profile_archive_path_basename = os.path.basename(
            self._pregenerated_profile_archive_dir)
        extracted_profile_dir_path = os.path.join(
            self._unzipped_profile,
            os.path.splitext(profile_archive_path_basename)[0])

        # Unzip profile directory.
        with zipfile.ZipFile(self._pregenerated_profile_archive_dir) as f:
            try:
                f.extractall(self._unzipped_profile)
            except Exception as e:
                # Cleanup any leftovers from unzipping.
                shutil.rmtree(self._unzipped_profile)
                logging.error(
                    "Error extracting profile directory zip file: %s", e)
                sys.exit(-1)

        if not os.path.exists(extracted_profile_dir_path):
            raise Exception("Failed to extract profile: ",
                            extracted_profile_dir_path)

        # Run with freshly extracted profile directory.
        logging.info("Using profile archive directory: %s",
                     extracted_profile_dir_path)
        self._finder_options.browser_options.profile_dir = (
            extracted_profile_dir_path)
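On success the extracted profile is left in a tempfile.mkdtemp() directory. A
hypothetical teardown for callers that want to reclaim the space (the helper
name is an assumption, not part of the original class):

import shutil

def _CleanUpPregeneratedProfile(self):
    # Remove the temporary extracted profile directory, if one was created.
    if getattr(self, '_unzipped_profile', None):
        shutil.rmtree(self._unzipped_profile, ignore_errors=True)
        self._unzipped_profile = None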
Example #7
 def testGetPseudoLockUnavailableCausesTimeout(self):
     with tempfile.NamedTemporaryFile(
             suffix='.pseudo_lock') as pseudo_lock_fd:
         with lock.FileLock(pseudo_lock_fd, lock.LOCK_EX | lock.LOCK_NB):
             with self.assertRaises(py_utils.TimeoutException):
                 file_path = pseudo_lock_fd.name.replace('.pseudo_lock', '')
                 cloud_storage.GetIfChanged(file_path,
                                            cloud_storage.PUBLIC_BUCKET)
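The timeout relies on GetIfChanged refusing to proceed while a sibling
<file>.pseudo_lock file exists. Only the naming convention is taken from the
test; the polling loop below is an assumed sketch of the waiting logic:

import os
import time

import py_utils  # provides the TimeoutException the tests expect

def _WaitForPseudoLock(file_path, timeout=5, poll_interval=0.1):
    # Block until '<file_path>.pseudo_lock' disappears, or give up.
    pseudo_lock_path = file_path + '.pseudo_lock'
    deadline = time.time() + timeout
    while os.path.exists(pseudo_lock_path):
        if time.time() > deadline:
            raise py_utils.TimeoutException(
                'Pseudo lock still held: %s' % pseudo_lock_path)
        time.sleep(poll_interval)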
Example #9
    def testHashPathDoesNotExists(self, unused_get_locked, unused_lock_mock):
        cloud_storage.ReadHash = _FakeReadHash
        cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
        file_path = 'test-file-path.wpr'

        cloud_storage._GetLocked = self._FakeGet
        # hash_path doesn't exist.
        self.assertFalse(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
Example #10
    def testRefetchingFileUponHashFileChange(self, mock_get_locked,
                                             mock_calculate_hash,
                                             unused_get_locked):
        mock_calculate_hash.side_effect = _FakeCalulateHashNewHash
        cloud_storage.ReadHash = _FakeReadHash
        file_path = 'test-file-path.wpr'
        hash_path = file_path + '.sha1'

        def _FakeGetLocked(bucket, expected_hash, file_path):
            del bucket, expected_hash, file_path  # unused
            cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

        mock_get_locked.side_effect = _FakeGetLocked

        self.CreateFiles([file_path, hash_path])
        # hash_path and file_path exist, and have different hashes. This first call
        # will invoke a fetch.
        self.assertTrue(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))

        # The fetch left a .fetchts file on the machine.
        self.assertTrue(os.path.exists(file_path + '.fetchts'))

        with open(file_path + '.fetchts') as f:
            fetchts = float(f.read())

        # Simulate the .sha1 hash_path file being updated with a new hash
        # after .fetchts was created.
        file_obj = self.fs.GetObject(hash_path)
        file_obj.SetMTime(fetchts + 100)

        cloud_storage.ReadHash = lambda _: 'hashNeW'

        def _FakeGetLockedNewHash(bucket, expected_hash, file_path):
            del bucket, expected_hash, file_path  # unused
            cloud_storage.CalculateHash = lambda _: 'hashNeW'

        mock_get_locked.side_effect = _FakeGetLockedNewHash

        # hash_path and file_path exist, and have different hashes again. This
        # second call should invoke another fetch.
        self.assertTrue(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
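The re-fetch decision hinges on comparing the .sha1 file's mtime against the
timestamp recorded in .fetchts. A minimal sketch of that check, assuming
.fetchts holds a float epoch time (which is how the test reads it):

import os

def _HashFileChangedSinceFetch(file_path):
    # True if the .sha1 sibling was modified after the last recorded fetch;
    # in that case the hash must be re-checked and the file re-fetched.
    fetchts_path = file_path + '.fetchts'
    hash_path = file_path + '.sha1'
    if not os.path.exists(fetchts_path):
        return True
    with open(fetchts_path) as f:
        last_fetch = float(f.read())
    return os.path.getmtime(hash_path) > last_fetch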
Example #11
    def testHashPathAndFileHashExistWithSameHash(self, unused_get_locked,
                                                 unused_lock_mock):
        cloud_storage.ReadHash = _FakeReadHash
        cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
        file_path = 'test-file-path.wpr'

        # hash_path and file_path exist, and have same hash.
        self.CreateFiles([file_path])
        self.assertFalse(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
Example #12
    def testHashPathExistsButFilePathDoesNot(self, unused_get_locked,
                                             unused_lock_mock):
        cloud_storage.ReadHash = _FakeReadHash
        cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
        file_path = 'test-file-path.wpr'
        hash_path = file_path + '.sha1'

        # hash_path exists, but file_path doesn't.
        self.CreateFiles([hash_path])
        self.assertTrue(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
Example #13
 def testGetGlobalLockUnavailableCausesTimeout(self):
     with open(_CLOUD_STORAGE_GLOBAL_LOCK_PATH) as global_lock_fd:
         with lock.FileLock(global_lock_fd, lock.LOCK_EX | lock.LOCK_NB):
             tmp_dir = tempfile.mkdtemp()
             try:
                 file_path = os.path.join(tmp_dir, 'foo')
                 with self.assertRaises(py_utils.TimeoutException):
                     cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET)
             finally:
                 shutil.rmtree(tmp_dir)
Example #14
 def SetUpProcess(cls):
   options = cls.GetParsedCommandLineOptions()
   color_profile_manager.ForceUntilExitSRGB(
     options.dont_restore_color_profile_after_test)
   super(MapsIntegrationTest, cls).SetUpProcess()
   browser_args = [
       '--force-color-profile=srgb',
       '--ensure-forced-color-profile']
   cls.CustomizeBrowserArgs(browser_args)
   cloud_storage.GetIfChanged(
     os.path.join(maps_perf_test_path, 'load_dataset'),
     cloud_storage.PUBLIC_BUCKET)
   cls.SetStaticServerDirs([maps_perf_test_path])
   cls.StartBrowser()
Example #15
 def SetUpProcess(cls):
     options = cls.GetParsedCommandLineOptions()
     color_profile_manager.ForceUntilExitSRGB(
         options.dont_restore_color_profile_after_test)
     super(MapsIntegrationTest, cls).SetUpProcess()
     cls.CustomizeBrowserArgs([
         cba.FORCE_COLOR_PROFILE_SRGB,
         cba.ENSURE_FORCED_COLOR_PROFILE,
     ])
     cloud_storage.GetIfChanged(
         os.path.join(_MAPS_PERF_TEST_PATH, 'load_dataset'),
         cloud_storage.PUBLIC_BUCKET)
     cls.SetStaticServerDirs([_MAPS_PERF_TEST_PATH])
     cls.StartBrowser()
Example #16
  def StartWPRServer(cls, archive_path=None, archive_bucket=None):
    """Start a webpage replay server.

    Args:
      archive_path: Path to the WPR file. If there is a corresponding sha1 file,
          this archive will be automatically downloaded from Google Storage.
      archive_bucket: The bucket to look for the WPR archive.
    """
    assert cls._browser_options, (
        'Browser options must be set with |SetBrowserOptions| prior to '
        'starting WPR')
    assert not cls.browser, 'WPR must be started prior to browser being started'

    cloud_storage.GetIfChanged(archive_path, archive_bucket)
    cls.platform.network_controller.StartReplay(archive_path)
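A hedged usage sketch; the archive path is a placeholder, and the exact
SetBrowserOptions call is assumed from the assertion message above:

cls.SetBrowserOptions(cls.GetParsedCommandLineOptions())  # assumed setup call
cls.StartWPRServer(archive_path='page_sets/data/story.wpr',  # placeholder path
                   archive_bucket=cloud_storage.PARTNER_BUCKET)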
Example #17
 def download_if_needed(path):
   try:
     cloud_storage.GetIfChanged(path, self._bucket)
   except (cloud_storage.CredentialsError, cloud_storage.PermissionError):
     if os.path.exists(path):
       # If the archive exists, assume the user recorded their own and warn
       # them that they do not have the proper credentials to download.
       logging.warning('Need credentials to update WPR archive: %s', path)
     else:
       logging.error("You either aren't authenticated or don't have "
                     "permission to use the archives for this page set."
                     "\nYou may need to run gsutil config."
                     "\nYou can find instructions for gsutil config at: "
                     "http://www.chromium.org/developers/telemetry/"
                     "upload_to_cloud_storage")
       raise
Example #18
    def testHashPathAndFileHashExistWithDifferentHash(self, mock_get_locked,
                                                      unused_get_locked):
        cloud_storage.ReadHash = _FakeReadHash
        cloud_storage.CalculateHash = _FakeCalulateHashNewHash
        file_path = 'test-file-path.wpr'
        hash_path = file_path + '.sha1'

        def _FakeGetLocked(bucket, expected_hash, file_path):
            del bucket, expected_hash, file_path  # unused
            cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

        mock_get_locked.side_effect = _FakeGetLocked

        self.CreateFiles([file_path, hash_path])
        # hash_path and file_path exist, and have different hashes.
        self.assertTrue(
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
Example #19
def _InstallWinRing0():
    """WinRing0 is used for reading MSRs."""
    executable_dir = os.path.dirname(sys.executable)

    python_is_64_bit = sys.maxsize > 2**32
    dll_file_name = 'WinRing0x64.dll' if python_is_64_bit else 'WinRing0.dll'
    dll_path = os.path.join(executable_dir, dll_file_name)

    os_is_64_bit = platform.machine().endswith('64')
    driver_file_name = 'WinRing0x64.sys' if os_is_64_bit else 'WinRing0.sys'
    driver_path = os.path.join(executable_dir, driver_file_name)

    # Check for WinRing0 and download if needed.
    if not (os.path.exists(dll_path) and os.path.exists(driver_path)):
        win_binary_dir = os.path.join(path.GetTelemetryDir(), 'bin', 'win',
                                      'AMD64')
        zip_path = os.path.join(win_binary_dir, 'winring0.zip')
        cloud_storage.GetIfChanged(zip_path,
                                   bucket=cloud_storage.PUBLIC_BUCKET)
        try:
            with zipfile.ZipFile(zip_path, 'r') as zip_file:
                error_message = (
                    'Failed to extract %s into %s. If python claims that '
                    'the zip file is locked, this may be a lie. The problem may be '
                    'that python does not have write permissions to the destination '
                    'directory.')
                # Install DLL.
                if not os.path.exists(dll_path):
                    try:
                        zip_file.extract(dll_file_name, executable_dir)
                    except:
                        logging.error(error_message %
                                      (dll_file_name, executable_dir))
                        raise

                # Install kernel driver.
                if not os.path.exists(driver_path):
                    try:
                        zip_file.extract(driver_file_name, executable_dir)
                    except:
                        logging.error(error_message %
                                      (driver_file_name, executable_dir))
                        raise
        finally:
            os.remove(zip_path)
Example #20
def GetAccountNameAndPassword(credential,
                              credentials_path=DEFAULT_CREDENTIAL_PATH):
    """Returns username and password for |credential| in credentials_path file.

  Args:
    credential: The credential to retrieve from the file (type string).
    credentials_path: The path to the credentials file (string).

  Returns:
    A (username, password) tuple of strings.
  """
    if (credentials_path == DEFAULT_CREDENTIAL_PATH
            and not os.path.exists(DEFAULT_CREDENTIAL_PATH)):
        cloud_storage.GetIfChanged(DEFAULT_CREDENTIAL_PATH,
                                   DEFAULT_CREDENTIAL_BUCKET)

    with open(credentials_path, 'r') as f:
        credentials = json.load(f)
    c = credentials.get(credential)
    return c['username'], c['password']
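Hedged usage; the credential name and file contents below are illustrative
only, inferred from how the function reads the JSON:

# Credentials file assumed to look like:
#   {"example_account": {"username": "user@example.com", "password": "secret"}}
username, password = GetAccountNameAndPassword('example_account')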
Example #21
  def DownloadArchivesIfNeeded(self):
    """Downloads archives iff the Archive has a bucket parameter and the user
    has permission to access the bucket.

    Raises cloud storage Permissions or Credentials error when there is no
    local copy of the archive and the user doesn't have permission to access
    the archive's bucket.

    Warns when a bucket is not specified or when the user doesn't have
    permission to access the archive's bucket but a local copy of the archive
    exists.
    """
    # Download all .wpr files.
    if not self._bucket:
      logging.warning('Story set in %s has no bucket specified, and '
                      'cannot be downloaded from cloud_storage.',
                      self._file_path)
      return
    assert 'archives' in self._data, 'Invalid data format in %s. \'archives\'' \
                                     ' field is needed' % self._file_path
    for archive_path in self._data['archives']:
      archive_path = self._WprFileNameToPath(archive_path)
      try:
        cloud_storage.GetIfChanged(archive_path, self._bucket)
      except (cloud_storage.CredentialsError, cloud_storage.PermissionError):
        if os.path.exists(archive_path):
          # If the archive exists, assume the user recorded their own and
          # simply warn.
          logging.warning('Need credentials to update WPR archive: %s',
                          archive_path)
        else:
          logging.error("You either aren't authenticated or don't have "
                        "permission to use the archives for this page set."
                        "\nYou may need to run gsutil config."
                        "\nYou can find instructions for gsutil config at: "
                        "http://www.chromium.org/developers/telemetry/"
                        "upload_to_cloud_storage")
          raise
Example #22
def _DownloadFromCloudStorage(path):
    print('Downloading %s from gcs.' % path)
    cloud_storage.GetIfChanged(path, cloud_storage.PARTNER_BUCKET)
Example #23
    def testPseudoLockTimeout(self):
        self.fs.CreateFile('/tmp/test-file-path.wpr.pseudo_lock')
        file_path = '/tmp/test-file-path.wpr'

        with self.assertRaises(py_utils.TimeoutException):
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET)