def testCopy(self):
    """Verify filelib.Copy creates missing dest dirs and then copies.

    Exercises three paths through filelib.Copy:
      1. Destination directory already exists -> straight copy.
      2. Destination directory is missing -> SafeMakedirs, then copy.
      3. Relative destination (dirname is '') -> no existence check, copy.
    """
    path1 = '/some/local/path'
    path2 = '/other/local/path'
    relative_path = 'relative.bin'
    self.mox.StubOutWithMock(filelib, 'Exists')
    self.mox.StubOutWithMock(osutils, 'SafeMakedirs')
    self.mox.StubOutWithMock(filelib.shutil, 'copy2')

    # Set up the test replay script.
    # Run 1, path2 directory exists.
    filelib.Exists(os.path.dirname(path2), as_dir=True).AndReturn(True)
    filelib.shutil.copy2(path1, path2)
    # Run 2, path2 directory does not exist.
    filelib.Exists(os.path.dirname(path2), as_dir=True).AndReturn(False)
    osutils.SafeMakedirs(os.path.dirname(path2))
    filelib.shutil.copy2(path1, path2)
    # Run 3, the target directory is '.', so don't test existence.
    filelib.shutil.copy2(path1, relative_path)
    self.mox.ReplayAll()

    # Run the test verifications, three times.
    filelib.Copy(path1, path2)
    filelib.Copy(path1, path2)
    filelib.Copy(path1, relative_path)
    self.mox.VerifyAll()
def testCopy(self):
    """Verify urilib.Copy dispatches to the right backend per URI pair.

    Covers: local->local (filelib), local<->GS and GS->GS (gslib),
    HTTP->local (URLRetrieve), and the unsupported local->HTTP pair,
    which must raise NotSupportedBetweenTypes.
    """
    gs_path = 'gs://bucket/some/path'
    local_path = '/some/local/path'
    http_path = 'http://host.domain/some/path'
    result = 'TheResult'
    self.mox.StubOutWithMock(gslib, 'Copy')
    self.mox.StubOutWithMock(filelib, 'Copy')
    self.mox.StubOutWithMock(urilib, 'URLRetrieve')

    # Set up the test replay script.
    # Run 1, two local files.
    filelib.Copy(local_path, local_path + '.1').AndReturn(result)
    # Run 2, local and GS.
    gslib.Copy(local_path, gs_path).AndReturn(result)
    # Run 3, GS and GS.
    gslib.Copy(gs_path, gs_path + '.1').AndReturn(result)
    # Run 4, HTTP and local.
    urilib.URLRetrieve(http_path, local_path).AndReturn(result)
    # Run 5, local and HTTP: no backend call expected; Copy must raise.
    self.mox.ReplayAll()

    # Run the test verification.
    # assertEquals is a deprecated alias (removed in Python 3.12); use
    # assertEqual instead.
    self.assertEqual(result, urilib.Copy(local_path, local_path + '.1'))
    self.assertEqual(result, urilib.Copy(local_path, gs_path))
    self.assertEqual(result, urilib.Copy(gs_path, gs_path + '.1'))
    self.assertEqual(result, urilib.Copy(http_path, local_path))
    self.assertRaises(urilib.NotSupportedBetweenTypes, urilib.Copy,
                      local_path, http_path)
    self.mox.VerifyAll()
def Copy(src_uri, dest_uri):
    """Copy one URI to another.

    Args:
      src_uri: URI to copy from.
      dest_uri: Path to copy to.

    Raises:
      NotSupportedBetweenTypes if Copy cannot be done between the two
        URIs provided.
    """
    src_type = GetUriType(src_uri)
    dest_type = GetUriType(dest_uri)
    types = set((src_type, dest_type))

    if TYPE_GS in types:
        # gslib handles GS<->GS as well as GS<->local transfers.
        if types.issubset((TYPE_GS, TYPE_LOCAL)):
            return gslib.Copy(src_uri, dest_uri)
    elif types == set((TYPE_LOCAL,)):
        # Both endpoints are local files.
        return filelib.Copy(src_uri, dest_uri)
    elif src_type in (TYPE_HTTP, TYPE_HTTPS) and dest_type == TYPE_LOCAL:
        # Download file from URL.
        return URLRetrieve(src_uri, dest_uri)

    raise NotSupportedBetweenTypes(src_type, dest_type)
def testCopyIntoNewDir(self):
    """Copy a file into a dir that does not yet exist."""
    src = os.path.join(self.tempdir, 'path1')
    dest_dir = os.path.join(self.tempdir, 'subdir')
    dest = os.path.join(dest_dir, 'path2')

    # Only the source file exists; Copy must create the missing dir.
    osutils.Touch(src)
    filelib.Copy(src, dest)
    self.assertExists(dest)
def testCopyRelative(self):
    """Copy a file using relative destination."""
    src = os.path.join(self.tempdir, 'path1')
    dest = os.path.join(self.tempdir, 'path2')

    # Work from tempdir so the relative destination resolves there.
    os.chdir(self.tempdir)
    osutils.Touch(src)
    filelib.Copy(src, os.path.basename(dest))
    self.assertExists(dest)
def testCopyIntoExistingDir(self):
    """Copy a file into a dir that exists."""
    src = os.path.join(self.tempdir, 'path1')
    dest_dir = os.path.join(self.tempdir, 'subdir')
    dest = os.path.join(dest_dir, 'path2')

    # Pre-create the destination directory before copying.
    osutils.Touch(src)
    osutils.SafeMakedirs(dest_dir)
    filelib.Copy(src, dest)
    self.assertExists(dest)
def GetHashSignatures(self, hashes, keysets=('update_signer', )):
    """See SignerPayloadsClientGoogleStorage._GetHashsignatures().

    Instead of waiting for the signers to sign the hashes, we just sign them
    and copy them to the requested files. It doesn't really support
    keysets at this point.

    Args:
      Look at SignerPayloadsClientGoogleStorage.GetHashsignatures()

    Returns:
      Look at SignerPayloadsClientGoogleStorage.GetHashsignatures()
    """
    logging.info('Signing the hashes with unofficial keys.')

    # Stage the private key where the signing script expects it.
    key_path = os.path.join(self._work_dir, 'update_key.pem')
    filelib.Copy(self._private_key, key_path)

    signatures = []
    for h in hashes:
        # binascii.hexlify() returns bytes on Python 3; decode so the hex
        # digest embeds cleanly into the file names below instead of
        # rendering as "b'...'".
        hash_hex = binascii.hexlify(h).decode('utf-8')
        hash_file = os.path.join(self._work_dir, 'hash-%s.bin' % hash_hex)
        signature_file = os.path.join(self._work_dir,
                                      'signature-%s.bin' % hash_hex)
        osutils.WriteFile(hash_file, h, mode='wb')

        sign_script = path_util.ToChrootPath(
            os.path.join(
                constants.SOURCE_ROOT,
                'src/platform/vboot_reference/scripts/image_signing/',
                'sign_official_build.sh'))

        # Sign inside the chroot, writing the signature next to the hash.
        cros_build_lib.run([
            sign_script, 'update_payload',
            path_util.ToChrootPath(hash_file),
            path_util.ToChrootPath(self._work_dir),
            path_util.ToChrootPath(signature_file)
        ],
                           enter_chroot=True)

        signatures.append([osutils.ReadFile(signature_file, mode='rb')])

    return signatures
def _PrepareImage(self, image, image_file):
    """Download and prepare an image for delta generation.

    Preparation includes downloading, extracting and converting the image
    into an on-disk format, as necessary.

    Args:
      image: an object representing the image we're processing, either
        UnsignedImageArchive or Image type from gspaths module.
      image_file: file into which the prepared image should be copied.

    Raises:
      Error: if the image is neither an Image nor an
        UnsignedImageArchive.
    """
    logging.info('Preparing image from %s as %s', image.uri, image_file)

    # Figure out what we're downloading and how to handle it.
    # Maps image_type -> (archive member to extract, flag).  The second
    # tuple element is unused here (discarded as `_` below) — presumably
    # meaningful to other callers of this table; confirm before relying
    # on it.
    image_handling_by_type = {
        'signed': (None, True),
        'test': (self.TEST_IMAGE_NAME, False),
        'recovery': (self.RECOVERY_IMAGE_NAME, True),
        'base': (self.BASE_IMAGE_NAME, True),
    }
    if gspaths.IsImage(image):
        # No need to extract.
        extract_file = None
    elif gspaths.IsUnsignedImageArchive(image):
        # Archives default to 'signed' handling when image_type is absent.
        extract_file, _ = image_handling_by_type[image.get(
            'image_type', 'signed')]
    else:
        raise Error('Unknown image type %s' % type(image))

    # Are we downloading an archive that contains the image?
    if extract_file:
        # Archive will be downloaded to a temporary location.
        # delete=False: we only want the generated unique name here; the
        # file is removed explicitly at the end of this method.
        with tempfile.NamedTemporaryFile(prefix='image-archive-',
                                         suffix='.tar.xz',
                                         dir=self.work_dir,
                                         delete=False) as temp_file:
            download_file = temp_file.name
    else:
        download_file = image_file

    # Download the image file or archive. If it was just a local file, ignore
    # caching and do a simple copy. TODO(crbug.com/926034): Add a caching
    # mechanism for local files.
    if urilib.GetUriType(image.uri) == urilib.TYPE_LOCAL:
        filelib.Copy(image.uri, download_file)
    else:
        self._cache.GetFileCopy(image.uri, download_file)

    # If we downloaded an archive, extract the image file from it.
    if extract_file:
        cmd = ['tar', '-xJf', download_file, extract_file]
        cros_build_lib.run(cmd, cwd=self.work_dir)

        # Rename it into the desired image name.
        shutil.move(os.path.join(self.work_dir, extract_file), image_file)

        # It should be safe to delete the archive at this point.
        # TODO(crbug/1016555): consider removing the logging once resolved.
        logging.info('Removing %s', download_file)
        os.remove(download_file)