def testListFiles(self):
  """Verify urilib.ListFiles dispatches to the right backend per URI scheme.

  Local paths go to filelib, gs:// paths go to gslib, and http:// paths
  raise NotSupportedForType.
  """
  gs_path = 'gs://bucket/some/path'
  local_path = '/some/local/path'
  http_path = 'http://host.domain/some/path'
  result = 'TheResult'
  patt = 'TheFilePattern'

  self.mox.StubOutWithMock(gslib, 'ListFiles')
  self.mox.StubOutWithMock(filelib, 'ListFiles')

  # Set up the test replay script.
  # Run 1, local.
  filelib.ListFiles(local_path, recurse=True, filepattern=None,
                    sort=False).AndReturn(result)
  # Run 2, GS.
  gslib.ListFiles(gs_path, recurse=False, filepattern=patt,
                  sort=True).AndReturn(result)
  # Run 3, HTTP: no stub expected; urilib raises before dispatching.
  self.mox.ReplayAll()

  # Run the test verification.
  self.assertEqual(result, urilib.ListFiles(local_path, recurse=True))
  self.assertEqual(
      result, urilib.ListFiles(gs_path, filepattern=patt, sort=True))
  self.assertRaises(urilib.NotSupportedForType, urilib.ListFiles, http_path)
  self.mox.VerifyAll()
def FindExistingPayloads(payload):
  """Look to see if any matching payloads already exist.

  Since payload names contain a random component, there can be multiple
  names for a given payload. This function lists all existing payloads
  that match the default URI for the given payload.

  Args:
    payload: gspaths.Payload instance.

  Returns:
    List of URIs for existing payloads that match the default payload
    pattern.
  """
  # A '*' random component makes the default URI a wildcard pattern that
  # matches every previously generated name for this payload.
  wildcard_uri = DefaultPayloadUri(payload, random_str='*')
  candidates = urilib.ListFiles(wildcard_uri)
  return _FilterNonPayloadUris(candidates)
def testFindExistingPayloads(self):
  """Test finding already existing payloads."""
  self.mox.StubOutWithMock(urilib, 'ListFiles')

  # The wildcard URI FindExistingPayloads is expected to search with.
  expected_uri = ('gs://chromeos-releases/dev-channel/x86-alex/1620.0.0/'
                  'payloads/chromeos_1620.0.0_x86-alex_dev-channel_full_'
                  'mp-v3.bin-*.signed')

  # Set up the test replay script.
  urilib.ListFiles(expected_uri).AndReturn(['foo_result'])
  self.mox.ReplayAll()

  # Run the test verification.
  self.assertEqual(
      ['foo_result'],
      paygen_payload_lib.FindExistingPayloads(self.full_payload))
def _MapToArchive(self, board, version):
  """Returns the chromeos-image-archive equivalents for the build.

  Args:
    board: The board name (per chromeos-releases).
    version: The build version.

  Returns:
    A tuple consisting of the archive board name, build name and build URI.

  Raises:
    ArchiveError: if we could not compute the mapping.
  """
  # Map chromeos-releases board name to its chromeos-image-archive
  # equivalent (archive names use '_' where release names use '-').
  archive_board_candidates = {
      archive_board for archive_board in self._site_config.GetBoards()
      if archive_board.replace('_', '-') == board}

  if not archive_board_candidates:
    raise ArchiveError('could not find build board name for %s' % board)
  elif len(archive_board_candidates) > 1:
    raise ArchiveError('found multiple build board names for %s: %s' %
                       (board, ', '.join(archive_board_candidates)))

  archive_board = archive_board_candidates.pop()

  # Find something in the respective chromeos-image-archive build directory.
  archive_build_search_uri = gspaths.ChromeosImageArchive.BuildUri(
      archive_board, '*', version)
  archive_build_file_uri_list = urilib.ListFiles(archive_build_search_uri)
  if not archive_build_file_uri_list:
    raise ArchiveError('cannot find archive build directory for %s' %
                       archive_build_search_uri)

  # Use the first search result; any file in the build directory suffices
  # to recover the directory's path and URI.
  uri_parts = urlparse.urlsplit(archive_build_file_uri_list[0])
  archive_build_path = os.path.dirname(uri_parts.path)
  archive_build = archive_build_path.strip('/')
  archive_build_uri = urlparse.urlunsplit(
      (uri_parts.scheme, uri_parts.netloc, archive_build_path, '', ''))

  return archive_board, archive_build, archive_build_uri
def testIntegration(self):
  """End-to-end exercise of urilib across local and GS backends.

  Covers Exists, ListFiles (shallow and recursive), CopyFiles in both
  directions, Cmp, and RemoveDirContents.
  """
  self._SetUpDirs()

  self.assertTrue(urilib.Exists(self.filesdir, as_dir=True))
  self.assertTrue(urilib.Exists(self.file1_local))
  self.assertTrue(urilib.Exists(self.file2_local))
  self.assertTrue(urilib.Exists(self.subfile_local))
  self.assertTrue(urilib.Exists(self.subdir_local, as_dir=True))
  self.assertFalse(urilib.Exists(self.file1_gs))
  self.assertFalse(urilib.Exists(self.file2_gs))
  self.assertFalse(urilib.Exists(self.subfile_gs))

  shallow_local_files = [self.file1_local, self.file2_local]
  deep_local_files = shallow_local_files + [self.subfile_local]
  shallow_gs_files = [self.file1_gs, self.file2_gs]
  deep_gs_files = shallow_gs_files + [self.subfile_gs]

  # Test ListFiles, local version.
  self.assertEqual(set(shallow_local_files),
                   set(urilib.ListFiles(self.filesdir)))
  self.assertEqual(set(deep_local_files),
                   set(urilib.ListFiles(self.filesdir, recurse=True)))

  # Test CopyFiles, from local to GS.
  self.assertEqual(set(deep_gs_files),
                   set(urilib.CopyFiles(self.filesdir, self.GS_DIR)))

  # Test ListFiles, GS version.
  self.assertEqual(set(shallow_gs_files),
                   set(urilib.ListFiles(self.GS_DIR)))
  self.assertEqual(set(deep_gs_files),
                   set(urilib.ListFiles(self.GS_DIR, recurse=True)))

  # Test Cmp between some files.
  self.assertTrue(urilib.Cmp(self.file1_local, self.file1_gs))
  self.assertFalse(urilib.Cmp(self.file2_local, self.file1_gs))

  # Test RemoveDirContents, local version.
  urilib.RemoveDirContents(self.filesdir)
  self.assertFalse(urilib.ListFiles(self.filesdir))

  # Test CopyFiles, from GS to local.
  self.assertEqual(set(deep_local_files),
                   set(urilib.CopyFiles(self.GS_DIR, self.filesdir)))

  # Test RemoveDirContents, GS version.
  urilib.RemoveDirContents(self.GS_DIR)
  self.assertFalse(urilib.ListFiles(self.GS_DIR))
def _FindFullTestPayloads(self, channel, version): """Returns a list of full test payloads for a given version. Uses the current build's board and bucket values. This method caches the full test payloads previously discovered as we may be using them for multiple tests in a single run. Args: channel: Channel to look in for payload. version: A build version whose payloads to look for. Returns: A (possibly empty) list of payload URIs. """ assert channel assert version if (channel, version) in self._version_to_full_test_payloads: # Serve from cache, if possible. return self._version_to_full_test_payloads[(channel, version)] payload_search_uri = gspaths.ChromeosReleases.PayloadUri( channel, self._build.board, version, '*', bucket=self._build.bucket) payload_candidate = urilib.ListFiles(payload_search_uri) # We create related files for each payload that have the payload name # plus these extensions. Skip these files. NOT_PAYLOAD = ('.json', '.log') full_test_payloads = [ u for u in payload_candidate if not any([u.endswith(n) for n in NOT_PAYLOAD]) ] # Store in cache. self._version_to_full_test_payloads[(channel, version)] = full_test_payloads return full_test_payloads
def _DiscoverSignedImages(self, build):
  """Return a list of images associated with a given build.

  Args:
    build: The build to find images for.

  Returns:
    A list of images associated with the build. This may include premp,
    and mp images.

  Raises:
    BuildCorrupt: Raised if unexpected images are found.
    ImageMissing: Raised if expected images are missing.
  """
  # Ideally, |image_type| below should be constrained to the type(s)
  # expected for the board. But the board signing configs are not easily
  # accessible at this point, so we use the wildcard here and rely on the
  # signers to upload the expected artifacts.
  search_uri = gspaths.ChromeosReleases.ImageUri(
      build.channel, build.board, build.version, key='*', image_type='*',
      image_channel='*', image_version='*', bucket=build.bucket)

  # Parse every URI found; unparsable URIs come back as None and are
  # dropped by the filtering comprehension.
  parsed = (gspaths.ChromeosReleases.ParseImageUri(uri)
            for uri in urilib.ListFiles(search_uri))
  images = [image for image in parsed if image]

  # We only care about recovery and test image types, ignore all others.
  images = _FilterForValidImageType(images)

  self._ValidateExpectedBuildImages(build, images)

  return images
def _DiscoverTestImage(self, build):
  """Return the unsigned test image archive for a given build.

  Args:
    build: The build to find images for.

  Returns:
    A gspaths.UnsignedImageArchive instance.

  Raises:
    BuildCorrupt: Raised if unexpected images are found.
    ImageMissing: Raised if expected images are missing.
  """
  search_uri = gspaths.ChromeosReleases.UnsignedImageUri(
      build.channel, build.board, build.version, milestone='*',
      image_type='test', bucket=build.bucket)

  # Parse each discovered URI; unparsable ones yield None and are dropped.
  candidates = [gspaths.ChromeosReleases.ParseUnsignedImageUri(uri)
                for uri in urilib.ListFiles(search_uri)]
  images = [image for image in candidates if image]

  # Make sure we found the expected number of build images (exactly 1).
  if len(images) > 1:
    raise BuildCorrupt('%s has multiple test images: %s' % (build, images))
  if not images:
    raise ImageMissing('%s has no test image' % build)

  return images[0]