def testGetIfChanged(self, unused_lock_mock):
  # Replaces cloud_storage's hash helpers and _GetLocked with fakes so no
  # network I/O occurs, then checks GetIfChanged's download decision for each
  # combination of hash-file / data-file presence. The originals are restored
  # in the finally block so other tests are unaffected.
  orig_get = cloud_storage._GetLocked
  orig_read_hash = cloud_storage.ReadHash
  orig_calculate_hash = cloud_storage.CalculateHash
  # _FakeCalulateHashMatchesRead makes the local file's hash match the .sha1
  # file, i.e. the "already up to date" case.
  cloud_storage.ReadHash = _FakeReadHash
  cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
  file_path = 'test-file-path.wpr'
  hash_path = file_path + '.sha1'
  try:
    cloud_storage._GetLocked = self._FakeGet
    # hash_path doesn't exist.
    self.assertFalse(
        cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
    # hash_path exists, but file_path doesn't.
    self.CreateFiles([hash_path])
    self.assertTrue(
        cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
    # hash_path and file_path exist, and have same hash.
    self.CreateFiles([file_path])
    self.assertFalse(
        cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
    # hash_path and file_path exist, and have different hashes.
    cloud_storage.CalculateHash = _FakeCalulateHashNewHash
    self.assertTrue(
        cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
  finally:
    # Restore the real implementations regardless of assertion outcome.
    cloud_storage._GetLocked = orig_get
    cloud_storage.CalculateHash = orig_calculate_hash
    cloud_storage.ReadHash = orig_read_hash
def testGetIfChanged(self):
  # Verifies GetIfChanged's download decision for each combination of
  # hash-file / data-file presence. File existence is simulated through
  # system_stub overrides of cloud_storage's os/open, so no real filesystem
  # or network access happens.
  stubs = system_stub.Override(cloud_storage, ['os', 'open'])
  # The gsutil executable must "exist" for cloud_storage's internal checks.
  stubs.open.files[_FakeFindGsutil()] = ''
  orig_get = cloud_storage.Get
  orig_read_hash = cloud_storage.ReadHash
  orig_calculate_hash = cloud_storage.CalculateHash
  # _FakeCalulateHashMatchesRead makes the local file's hash match the .sha1
  # file, i.e. the "already up to date" case.
  cloud_storage.ReadHash = _FakeReadHash
  cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
  file_path = 'test-file-path.wpr'
  hash_path = file_path + '.sha1'
  try:
    cloud_storage.Get = self._FakeGet
    # hash_path doesn't exist.
    self.assertFalse(
        cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
    # hash_path exists, but file_path doesn't.
    stubs.os.path.files.append(hash_path)
    self.assertTrue(
        cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
    # hash_path and file_path exist, and have same hash.
    stubs.os.path.files.append(file_path)
    self.assertFalse(
        cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
    # hash_path and file_path exist, and have different hashes.
    cloud_storage.CalculateHash = _FakeCalulateHashNewHash
    self.assertTrue(
        cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET))
  finally:
    # Restore stubs and the real implementations regardless of outcome.
    stubs.Restore()
    cloud_storage.Get = orig_get
    cloud_storage.CalculateHash = orig_calculate_hash
    cloud_storage.ReadHash = orig_read_hash
def FindPath(binary_name, arch_name, platform_name):
  """Returns the path to the given binary name, pulling from the cloud if
  necessary."""
  if platform_name == 'win':
    binary_name += '.exe'

  found_path = FindLocallyBuiltPath(binary_name)
  if not found_path and _IsInCloudStorage(binary_name, arch_name,
                                          platform_name):
    cloud_storage.GetIfChanged(
        _GetBinPath(binary_name, arch_name, platform_name),
        cloud_storage.PUBLIC_BUCKET)
    found_path = _GetBinPath(binary_name, arch_name, platform_name)

  if found_path and os.path.exists(found_path):
    # Ensure the downloaded file is actually executable.
    os.chmod(found_path,
             stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP)

  if not found_path and platform_name == 'win' and arch_name != 'AMD64':
    # This is an awful, awful hack to temporarily fix cloud_storage downloads
    # on XP until the binary_manager is finished and migrated to.
    # Please don't try this at home.
    return FindPath(binary_name, 'AMD64', platform_name)

  # Return an absolute path consistently.
  return os.path.abspath(found_path) if found_path else found_path
def _DownloadPregeneratedProfileArchive(self):
  """Download and extract the profile directory archive if one exists.

  On success, updates self._finder_options.browser_options.profile_dir with
  the directory of the extracted profile.
  """
  # Download profile directory from cloud storage.
  test_data_dir = os.path.join(util.GetChromiumSrcDir(), 'tools', 'perf',
                               'generated_profiles',
                               self._possible_browser.target_os)
  archive_name = self.GetPregeneratedProfileArchive()
  generated_profile_archive_path = os.path.normpath(
      os.path.join(test_data_dir, archive_name))

  try:
    cloud_storage.GetIfChanged(generated_profile_archive_path,
                               cloud_storage.PUBLIC_BUCKET)
  except (cloud_storage.CredentialsError,
          cloud_storage.PermissionError) as e:
    if os.path.exists(generated_profile_archive_path):
      # If the profile directory archive exists, assume the user has their
      # own local copy simply warn.
      logging.warning('Could not download Profile archive: %s',
                      generated_profile_archive_path)
    else:
      # If the archive profile directory doesn't exist, this is fatal.
      logging.error('Can not run without required profile archive: %s. '
                    'If you believe you have credentials, follow the '
                    'instructions below.',
                    generated_profile_archive_path)
      logging.error(str(e))
      sys.exit(-1)

  # Check to make sure the zip file exists.
  if not os.path.isfile(generated_profile_archive_path):
    raise Exception("Profile directory archive not downloaded: ",
                    generated_profile_archive_path)

  # The location to extract the profile into.
  extracted_profile_dir_path = (
      os.path.splitext(generated_profile_archive_path)[0])

  # Unzip profile directory.
  with zipfile.ZipFile(generated_profile_archive_path) as f:
    try:
      f.extractall(os.path.dirname(generated_profile_archive_path))
    except Exception as e:
      # Fix: this was `except e:`, which referenced an unbound name and
      # raised NameError instead of catching extraction failures.
      # Cleanup any leftovers from unzipping.
      if os.path.exists(extracted_profile_dir_path):
        shutil.rmtree(extracted_profile_dir_path)
      logging.error("Error extracting profile directory zip file: %s", e)
      sys.exit(-1)

  # Run with freshly extracted profile directory.
  logging.info("Using profile archive directory: %s",
               extracted_profile_dir_path)
  self._finder_options.browser_options.profile_dir = (
      extracted_profile_dir_path)
def _InstallIpfw(self):
  """Downloads the ipfw binary and kernel module from cloud storage.

  Exits the process with status 1 if the download fails.
  """
  ipfw_bin = binary_manager.FindPath('ipfw', self.GetArchName(),
                                     self.GetOSName())
  ipfw_mod = binary_manager.FindPath('ipfw_mod.ko', self.GetArchName(),
                                     self.GetOSName())

  try:
    changed = cloud_storage.GetIfChanged(ipfw_bin,
                                         cloud_storage.INTERNAL_BUCKET)
    changed |= cloud_storage.GetIfChanged(ipfw_mod,
                                          cloud_storage.INTERNAL_BUCKET)
  except cloud_storage.CloudStorageError as e:
    # `as e` replaces the Python-2-only `, e` form so the file also parses
    # under Python 3.
    logging.error(str(e))
    # Fix: the adjacent string literals previously concatenated into
    # "installingdummynet"; a separating space restores the message.
    logging.error('You may proceed by manually building and installing '
                  'dummynet for your kernel. See: '
                  'http://info.iet.unipi.it/~luigi/dummynet/')
    sys.exit(1)
def _DownloadPregeneratedProfileArchive(self):
  """Download and extract the profile directory archive if one exists.

  On success, updates self._finder_options.browser_options.profile_dir with
  the directory of the extracted profile.
  """
  try:
    cloud_storage.GetIfChanged(self._pregenerated_profile_archive_dir,
                               cloud_storage.PUBLIC_BUCKET)
  except (cloud_storage.CredentialsError,
          cloud_storage.PermissionError) as e:
    if os.path.exists(self._pregenerated_profile_archive_dir):
      # If the profile directory archive exists, assume the user has their
      # own local copy simply warn.
      logging.warning('Could not download Profile archive: %s',
                      self._pregenerated_profile_archive_dir)
    else:
      # If the archive profile directory doesn't exist, this is fatal.
      logging.error('Can not run without required profile archive: %s. '
                    'If you believe you have credentials, follow the '
                    'instructions below.',
                    self._pregenerated_profile_archive_dir)
      logging.error(str(e))
      sys.exit(-1)

  # Check to make sure the zip file exists.
  if not os.path.isfile(self._pregenerated_profile_archive_dir):
    raise Exception("Profile directory archive not downloaded: ",
                    self._pregenerated_profile_archive_dir)

  # The location to extract the profile into: a fresh temp dir, named after
  # the archive (minus its extension).
  self._unzipped_profile = tempfile.mkdtemp()
  profile_archive_path_basename = os.path.basename(
      self._pregenerated_profile_archive_dir)
  extracted_profile_dir_path = os.path.join(
      self._unzipped_profile,
      os.path.splitext(profile_archive_path_basename)[0])

  # Unzip profile directory.
  with zipfile.ZipFile(self._pregenerated_profile_archive_dir) as f:
    try:
      f.extractall(self._unzipped_profile)
    except Exception as e:
      # Cleanup any leftovers from unzipping.
      shutil.rmtree(self._unzipped_profile)
      logging.error("Error extracting profile directory zip file: %s", e)
      sys.exit(-1)

  if not os.path.exists(extracted_profile_dir_path):
    raise Exception("Failed to extract profile: ",
                    extracted_profile_dir_path)

  # Run with freshly extracted profile directory.
  logging.info("Using profile archive directory: %s",
               extracted_profile_dir_path)
  self._finder_options.browser_options.profile_dir = (
      extracted_profile_dir_path)
def __init__(self, url, page_set=None, base_dir=None, name='',
             credentials_path=None,
             credentials_bucket=cloud_storage.PUBLIC_BUCKET, labels=None,
             startup_url='', make_javascript_deterministic=True,
             shared_page_state_class=shared_page_state.SharedPageState,
             grouping_keys=None,
             cache_temperature=cache_temperature_module.ANY):
  """Initializes a Page.

  credentials_path is resolved relative to base_dir and, if present,
  refreshed from credentials_bucket via cloud storage.
  """
  self._url = url
  super(Page, self).__init__(
      shared_page_state_class, name=name, labels=labels,
      is_local=self._scheme in ['file', 'chrome', 'about'],
      make_javascript_deterministic=make_javascript_deterministic,
      grouping_keys=grouping_keys)
  self._page_set = page_set
  # Default value of base_dir is the directory of the file that defines the
  # class of this page instance.
  if base_dir is None:
    base_dir = os.path.dirname(inspect.getfile(self.__class__))
  self._base_dir = base_dir
  self._name = name
  if credentials_path:
    credentials_path = os.path.join(self._base_dir, credentials_path)
    cloud_storage.GetIfChanged(credentials_path, credentials_bucket)
    if not os.path.exists(credentials_path):
      # Missing after the fetch attempt: log and fall back to no credentials.
      logging.error('Invalid credentials path: %s' % credentials_path)
      credentials_path = None
  self._credentials_path = credentials_path
  self._cache_temperature = cache_temperature
  if cache_temperature != cache_temperature_module.ANY:
    # Expose a non-default cache temperature as a grouping key for results.
    self.grouping_keys['cache_temperature'] = cache_temperature
  # Whether to collect garbage on the page before navigating & performing
  # page actions.
  self._collect_garbage_before_run = True
  # These attributes can be set dynamically by the page.
  self.synthetic_delays = dict()
  self._startup_url = startup_url
  self.credentials = None
  self.skip_waits = False
  self.script_to_evaluate_on_commit = None
  self._SchemeErrorCheck()
def _InstallBinary(self, bin_name, fallback_package=None):
  """Fetches |bin_name| via the binary manager and puts it on PATH.

  Args:
    bin_name: Name of the binary package to install.
    fallback_package: Optional apt package name suggested to the user if the
        cloud-storage download fails; when given, the failure is re-raised
        as an Exception with install instructions.

  Raises:
    Exception: If the binary package cannot be located, or if the download
        fails and a fallback_package was provided.
  """
  bin_path = binary_manager.FetchPath(bin_name, self.GetArchName(),
                                      self.GetOSName())
  if not bin_path:
    raise Exception('Could not find the binary package %s' % bin_name)
  os.environ['PATH'] += os.pathsep + os.path.dirname(bin_path)

  try:
    cloud_storage.GetIfChanged(bin_path, cloud_storage.INTERNAL_BUCKET)
    # 0o755 replaces the Python-2-only literal 0755, which is a syntax error
    # under Python 3 (0o755 is accepted by Python 2.6+ as well).
    os.chmod(bin_path, 0o755)
  except cloud_storage.CloudStorageError as e:
    # `as e` replaces the Python-2-only `, e` form so the file also parses
    # under Python 3.
    logging.error(str(e))
    if fallback_package:
      raise Exception('You may proceed by manually installing %s via:\n'
                      'sudo apt-get install %s' % (bin_name,
                                                   fallback_package))
def StartWPRServer(cls, archive_path=None, archive_bucket=None):
  """Start a webpage replay server.

  Args:
    archive_path: Path to the WPR file. If there is a corresponding sha1
        file, this archive will be automatically downloaded from Google
        Storage.
    archive_bucket: The bucket to look for the WPR archive.
  """
  assert cls._browser_options, (
      'Browser options must be set with |SetBrowserOptions| prior to '
      'starting WPR')
  assert not cls.browser, 'WPR must be started prior to browser being started'

  # Refresh the archive from cloud storage when its sha1 indicates a change.
  cloud_storage.GetIfChanged(archive_path, archive_bucket)

  network_controller = cls.platform.network_controller
  network_controller.Open(wpr_modes.WPR_REPLAY, [])
  network_controller.StartReplay(archive_path=archive_path)
def _InstallWinRing0():
  """WinRing0 is used for reading MSRs.

  Downloads and unpacks the WinRing0 DLL and kernel driver (matching the
  Python/OS bitness) into the directory of the Python executable, if they
  are not already present. The downloaded zip is always removed afterwards.
  """
  executable_dir = os.path.dirname(sys.executable)

  python_is_64_bit = sys.maxsize > 2 ** 32
  dll_file_name = 'WinRing0x64.dll' if python_is_64_bit else 'WinRing0.dll'
  dll_path = os.path.join(executable_dir, dll_file_name)

  os_is_64_bit = platform.machine().endswith('64')
  driver_file_name = 'WinRing0x64.sys' if os_is_64_bit else 'WinRing0.sys'
  driver_path = os.path.join(executable_dir, driver_file_name)

  # Check for WinRing0 and download if needed.
  if not (os.path.exists(dll_path) and os.path.exists(driver_path)):
    win_binary_dir = os.path.join(
        path.GetTelemetryDir(), 'bin', 'win', 'AMD64')
    zip_path = os.path.join(win_binary_dir, 'winring0.zip')
    cloud_storage.GetIfChanged(zip_path, bucket=cloud_storage.PUBLIC_BUCKET)
    try:
      with zipfile.ZipFile(zip_path, 'r') as zip_file:
        error_message = (
            'Failed to extract %s into %s. If python claims that '
            'the zip file is locked, this may be a lie. The problem may be '
            'that python does not have write permissions to the destination '
            'directory.')
        # Install DLL.
        if not os.path.exists(dll_path):
          try:
            zip_file.extract(dll_file_name, executable_dir)
          except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not intercepted merely to log this message.
            logging.error(error_message % (dll_file_name, executable_dir))
            raise

        # Install kernel driver.
        if not os.path.exists(driver_path):
          try:
            zip_file.extract(driver_file_name, executable_dir)
          except Exception:
            logging.error(error_message % (driver_file_name, executable_dir))
            raise
    finally:
      os.remove(zip_path)
def __init__(self, url, page_set=None, base_dir=None, name='',
             credentials_path=None,
             credentials_bucket=cloud_storage.PUBLIC_BUCKET, labels=None,
             startup_url='', make_javascript_deterministic=True,
             shared_page_state_class=shared_page_state.SharedPageState):
  """Initializes a Page.

  credentials_path is resolved relative to base_dir and, if present,
  refreshed from credentials_bucket via cloud storage.
  """
  self._url = url
  super(Page, self).__init__(
      shared_page_state_class, name=name, labels=labels,
      is_local=self._scheme in ['file', 'chrome', 'about'],
      make_javascript_deterministic=make_javascript_deterministic)
  self._page_set = page_set
  # Default value of base_dir is the directory of the file that defines the
  # class of this page instance.
  if base_dir is None:
    base_dir = os.path.dirname(inspect.getfile(self.__class__))
  self._base_dir = base_dir
  self._name = name
  if credentials_path:
    credentials_path = os.path.join(self._base_dir, credentials_path)
    cloud_storage.GetIfChanged(credentials_path, credentials_bucket)
    if not os.path.exists(credentials_path):
      # Missing after the fetch attempt: log and fall back to no credentials.
      logging.error('Invalid credentials path: %s' % credentials_path)
      credentials_path = None
  self._credentials_path = credentials_path
  # These attributes can be set dynamically by the page.
  self.synthetic_delays = dict()
  self._startup_url = startup_url
  self.credentials = None
  self.skip_waits = False
  self.script_to_evaluate_on_commit = None
  self._SchemeErrorCheck()
def DownloadArchivesIfNeeded(self):
  """Downloads archives iff the Archive has a bucket parameter and the user
  has permission to access the bucket.

  Raises cloud storage Permissions or Credentials error when there is no
  local copy of the archive and the user doesn't have permission to access
  the archive's bucket.

  Warns when a bucket is not specified or when the user doesn't have
  permission to access the archive's bucket but a local copy of the archive
  exists.
  """
  # Download all .wpr files.
  if not self._bucket:
    # Fix: the '%s' placeholder previously had no matching argument, which
    # made logging report a string-formatting error instead of the warning.
    logging.warning(
        'Story set in %s has no bucket specified, and '
        'cannot be downloaded from cloud_storage.', self._file_path)
    return

  assert 'archives' in self._data, 'Invalid data format in %s. \'archives\'' \
      ' field is needed' % self._file_path
  for archive_path in self._data['archives']:
    archive_path = self._WprFileNameToPath(archive_path)
    try:
      cloud_storage.GetIfChanged(archive_path, self._bucket)
    except (cloud_storage.CredentialsError, cloud_storage.PermissionError):
      if os.path.exists(archive_path):
        # If the archive exists, assume the user recorded their own and
        # simply warn.
        logging.warning('Need credentials to update WPR archive: %s',
                        archive_path)
      else:
        logging.error("You either aren't authenticated or don't have "
                      "permission to use the archives for this page set."
                      "\nYou may need to run gsutil config."
                      "\nYou can find instructions for gsutil config at: "
                      "http://www.chromium.org/developers/telemetry/"
                      "upload_to_cloud_storage")
        raise
def FindPath(binary_name, arch_name, platform_name):
  """Returns the path to the given binary name, pulling from the cloud if
  necessary."""
  if platform_name == 'win':
    binary_name += '.exe'

  located = FindLocallyBuiltPath(binary_name)
  if not located and _IsInCloudStorage(binary_name, arch_name,
                                       platform_name):
    cloud_storage.GetIfChanged(
        _GetBinPath(binary_name, arch_name, platform_name),
        cloud_storage.PUBLIC_BUCKET)
    located = _GetBinPath(binary_name, arch_name, platform_name)

  if located and os.path.exists(located):
    # Ensure the downloaded file is actually executable.
    os.chmod(located,
             stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR | stat.S_IRGRP)

  # Return an absolute path consistently.
  return os.path.abspath(located) if located else located
def testDisableCloudStorageIo(self, unused_lock_mock):
  """Every cloud-storage I/O entry point must raise CloudStorageIODisabled
  while the DISABLE_CLOUD_STORAGE_IO environment variable is set."""
  # Fix: the original set the environment variable and never removed it,
  # leaking the disabled state into every subsequently-run test. Save the
  # prior value and restore it in the finally block.
  prev_value = os.environ.get('DISABLE_CLOUD_STORAGE_IO')
  os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
  try:
    dir_path = 'real_dir_path'
    self.fs.CreateDirectory(dir_path)
    file_path = os.path.join(dir_path, 'file1')
    file_path_sha = file_path + '.sha1'
    self.CreateFiles([file_path, file_path_sha])
    with open(file_path_sha, 'w') as f:
      f.write('hash1234')

    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1',
                         'remote_path2')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Get('bucket', 'foo', file_path)
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetIfChanged(file_path, 'foo')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetIfHashChanged('bar', file_path, 'bucket', 'hash1234')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Insert('bucket', 'foo', file_path)
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetFilesInDirectoryIfChanged(dir_path, 'bucket')
  finally:
    if prev_value is None:
      del os.environ['DISABLE_CLOUD_STORAGE_IO']
    else:
      os.environ['DISABLE_CLOUD_STORAGE_IO'] = prev_value