def MakeLongPathFile(self):
  long_path_txt = os.path.join(self.from_dir, LONG_SUB_DIRS, 'test2.txt')
  self.long_path_txt = long_path_txt
  if port_symlink.IsWindows():
    long_path_txt = cobalt_archive_extract.ToWinUncPath(long_path_txt)
  _MakeDirs(os.path.dirname(long_path_txt))
  with open(long_path_txt, 'w') as fd:
    fd.write('TEST BIS')
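# LONG_SUB_DIRS (defined elsewhere in this module) presumably names a deeply
# nested chain of subdirectories so that the resulting path exceeds the
# classic 260-character MAX_PATH limit on Windows; ToWinUncPath then rewrites
# the path into extended-length ('\\?\') form so the file can still be
# created at that depth. This note is an editorial assumption, not taken from
# the original file.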
def _CopyAppLauncherTools(repo_root, dest_root, additional_glob_patterns,
                          include_black_box_tests):
  # Step 1: Make sure dest_root is an absolute path.
  logging.info('Copying App Launcher tools to = %s', dest_root)
  dest_root = os.path.normpath(dest_root)
  if not os.path.isabs(dest_root):
    dest_root = os.path.join(os.getcwd(), dest_root)
  if port_symlink.IsWindows():
    dest_root = cobalt_archive_extract.ToWinUncPath(dest_root)
  logging.info('Absolute destination path = %s', dest_root)
  # Step 2: Remove the previous output directory if it exists.
  if os.path.isdir(dest_root):
    shutil.rmtree(dest_root)
  # Step 3: Find all glob files from the specified search directories.
  include_glob_patterns = _INCLUDE_FILE_PATTERNS
  if additional_glob_patterns:
    include_glob_patterns += additional_glob_patterns
  if include_black_box_tests:
    include_glob_patterns += _INCLUDE_BLACK_BOX_TESTS_PATTERNS
  copy_list = []
  for d, glob_pattern in include_glob_patterns:
    flist = _FindFilesRecursive(os.path.join(repo_root, d), glob_pattern)
    copy_list.extend(flist)
  # Copy all src/*.py from repo_root without recursing down.
  for f in os.listdir(repo_root):
    src = os.path.join(repo_root, f)
    if os.path.isfile(src) and src.endswith('.py'):
      copy_list.append(src)
  # Order by file path string and remove any duplicate paths.
  copy_list = sorted(set(copy_list))
  folders_logged = set()
  # Step 4: Copy the source files to the destination directory.
  for src in copy_list:
    tail_path = os.path.relpath(src, repo_root)
    dst = os.path.join(dest_root, tail_path)
    d = os.path.dirname(dst)
    if not os.path.isdir(d):
      os.makedirs(d)
    src_folder = os.path.dirname(src)
    if src_folder not in folders_logged:
      folders_logged.add(src_folder)
      logging.info('%s -> %s', src_folder, os.path.dirname(dst))
    shutil.copy2(src, dst)
  # Step 5: Re-write the platforms info file in the new repo copy.
  _WritePlatformsInfo(repo_root, dest_root)
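# A minimal usage sketch (not from the original call sites; the path and flag
# values below are assumptions chosen for illustration). It shows the expected
# argument shapes: additional_glob_patterns is a list of (directory, glob)
# tuples in the same form as _INCLUDE_FILE_PATTERNS, and a relative dest_root
# is normalized to an absolute path by the function itself.
def _ExampleCopyAppLauncherTools():
  repo_root = os.getcwd()           # assumed: invoked from the checkout root
  dest_root = 'out/app_launcher'    # relative; made absolute in Step 1 above
  _CopyAppLauncherTools(repo_root, dest_root,
                        additional_glob_patterns=[('cobalt/tools', '*.py')],
                        include_black_box_tests=False)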
def _MoveFileWithLongPath(src_file, dst_file):
  dst_dir = os.path.dirname(dst_file)
  if port_symlink.IsWindows():
    # Workaround for path-length limitations on Windows.
    src_dir = os.path.dirname(src_file)
    file_name = os.path.basename(src_file)
    shell_cmd = 'robocopy "%s" "%s" "%s" /MOV' % (src_dir, dst_dir, file_name)
    rc = _SilentCall(shell_cmd)
    if rc != 1:  # Robocopy returns 1 if a file was copied.
      raise OSError('File %s was not copied' % src_file)
    expected_out_file = os.path.join(dst_dir, file_name)
    if not _LongPathExists(expected_out_file):
      raise OSError('File did not end up in %s' % dst_dir)
    return
  else:
    if not os.path.isdir(dst_dir):
      os.makedirs(dst_dir)
    shutil.move(src_file, dst_file)
    if not os.path.isfile(dst_file):
      raise OSError('File did not end up in %s' % dst_dir)
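# Robocopy uses bit-flag exit codes (0 = nothing copied, 1 = files copied,
# 8 and above = failure), which is why the success check above is rc == 1
# rather than the usual rc == 0. A minimal caller sketch, using only the
# helpers defined in this file; the temp layout and path depth are
# illustrative assumptions:
def _ExampleMoveLongPath(tmp_dir):
  src_file = os.path.join(tmp_dir, 'test.txt')
  dst_file = os.path.join(tmp_dir, 'a' * 200, 'test.txt')  # deep destination
  _MoveFileWithLongPath(src_file, dst_file)
  assert _LongPathExists(dst_file)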
def MakeArchive(self,
                platform_name,
                platform_sdk_version,
                config,
                file_list,  # class FileList
                additional_buildinfo_dict=None):
  """Creates an archive for the given platform and config."""
  logging.info('Making cobalt archive...')
  is_windows = port_symlink.IsWindows()
  if additional_buildinfo_dict is None:
    additional_buildinfo_dict = {}
  if config not in GetAllConfigs():
    raise ValueError('Expected %s to be one of %s'
                     % (config, GetAllConfigs()))
  additional_buildinfo_dict = dict(additional_buildinfo_dict)  # Copy.
  build_info_str = _GenerateBuildInfoStr(
      platform_name=platform_name,
      platform_sdk_version=platform_sdk_version,
      config=config,
      additional_buildinfo_dict=additional_buildinfo_dict)
  with zipfile.ZipFile(self.archive_zip_path, mode='w',
                       compression=zipfile.ZIP_DEFLATED,
                       allowZip64=True) as zf:
    # Copy the cobalt_archive_content directory into the root of the archive.
    content_file_list = filelist.FileList()
    content_file_list.AddAllFilesInPath(root_dir=_SRC_CONTENT_PATH,
                                        sub_path=_SRC_CONTENT_PATH)
    for file_path, archive_path in content_file_list.file_list:
      # Skip the placeholder metadata.json file because the real one is
      # generated in its place.
      if os.path.basename(file_path) == 'metadata.json':
        continue
      zf.write(file_path, arcname=archive_path)
    # Write out the metadata.
    zf.writestr(_OUT_METADATA_PATH, build_info_str)
    executable_files = []
    if file_list.file_list:
      logging.info(' Compressing %d files', len(file_list.file_list))
      n_file_list = len(file_list.file_list)
      progress_set = set()
      for i in range(n_file_list):
        # Log progress as the compression percentage advances.
        prog = int((float(i) / n_file_list) * 100)
        if prog not in progress_set:
          progress_set.add(prog)
          logging.info(' Compressed %d%%...', prog)
        file_path, archive_path = file_list.file_list[i]
        if not is_windows:
          perms = _GetFilePermissions(file_path)
          if stat.S_IXUSR & perms:
            executable_files.append(archive_path)
        # TODO: Use and implement _FoldIdenticalFiles() to reduce duplicate
        # files. This will help platforms like nxswitch which include a lot
        # of duplicate files for the sdk.
        try:
          zf.write(file_path, arcname=archive_path)
        except WindowsError:  # pylint: disable=undefined-variable
          # Happens for long file path names.
          zf.write(cobalt_archive_extract.ToWinUncPath(file_path),
                   arcname=archive_path)
    if file_list.symlink_dir_list:
      logging.info(' Compressing %d symlinks',
                   len(file_list.symlink_dir_list))
    # Generate the decompress.json file used by decompress.py. Drop the first
    # element of each entry, which is the root directory and is not important
    # for symlink creation.
    symlink_dir_list = [l[1:] for l in file_list.symlink_dir_list]
    # Replace '\\' with '/'.
    symlink_dir_list = [_ToUnixPaths(l) for l in symlink_dir_list]
    decompress_json_str = _JsonDumpPrettyPrint({
        'symlink_dir': symlink_dir_list,
        'symlink_dir_doc': '[link_dir_path, target_dir_path]',
        'executable_files': executable_files,
    })
    zf.writestr(_OUT_DECOMP_JSON, decompress_json_str)
  logging.info('Done...')
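# For reference, the decompress.json payload written above has the following
# shape. The field names come from the code; the concrete values are
# hypothetical examples, not output captured from a real archive:
#
#   {
#     "symlink_dir": [["from_dir_lnk", "from_dir"]],
#     "symlink_dir_doc": "[link_dir_path, target_dir_path]",
#     "executable_files": ["deploy/cobalt"]
#   }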
class CobaltArchiveTest(unittest.TestCase):

  def testFoldIdenticalFiles(self):
    tf_root = filelist_test.TempFileSystem('bundler_fold')
    tf_root.Clear()
    tf1 = filelist_test.TempFileSystem(os.path.join('bundler_fold', '1'))
    tf2 = filelist_test.TempFileSystem(os.path.join('bundler_fold', '2'))
    tf1.Make()
    tf2.Make()
    flist = filelist.FileList()
    subdirs = [tf1.root_in_tmp, tf2.root_in_tmp]
    flist.AddAllFilesInPaths(tf_root.root_tmp, subdirs)
    flist.Print()
    identical_files = [tf1.test_txt, tf2.test_txt]
    physical_files, copy_files = cobalt_archive._FoldIdenticalFiles(
        identical_files)
    self.assertEqual(tf1.test_txt, physical_files[0])
    self.assertIn(tf1.test_txt, copy_files[0][0])
    self.assertIn(tf2.test_txt, copy_files[0][1])

  def testMakesDeployInfo(self):
    flist = filelist.FileList()
    tf = filelist_test.TempFileSystem()
    tf.Clear()
    tf.Make()
    bundle_zip = os.path.join(tf.root_tmp, 'bundle.zip')
    car = cobalt_archive.CobaltArchive(bundle_zip)
    car.MakeArchive(platform_name='fake',
                    platform_sdk_version='fake_sdk',
                    config='devel',
                    file_list=flist)
    out_dir = os.path.join(tf.root_tmp, 'out')
    car.ExtractTo(out_dir)
    out_metadata_file = os.path.join(out_dir,
                                     cobalt_archive._OUT_METADATA_PATH)
    self.assertEqual(filelist.GetFileType(out_metadata_file),
                     filelist.TYPE_FILE)
    with open(out_metadata_file) as fd:
      text = fd.read()
    js = json.loads(text)
    self.assertTrue(js)
    self.assertEqual(js['sdk_version'], 'fake_sdk')
    self.assertEqual(js['platform'], 'fake')
    self.assertEqual(js['config'], 'devel')

  def testExtractTo(self):
    flist = filelist.FileList()
    tf = filelist_test.TempFileSystem()
    tf.Clear()
    tf.Make()
    flist.AddFile(tf.root_in_tmp, tf.test_txt)
    flist.AddSymLink(tf.root_in_tmp, tf.sym_dir)
    bundle_zip = os.path.join(tf.root_tmp, 'bundle.zip')
    car = cobalt_archive.CobaltArchive(bundle_zip)
    car.MakeArchive(platform_name='fake',
                    platform_sdk_version='fake_sdk',
                    config='devel',
                    file_list=flist)
    out_dir = os.path.join(tf.root_tmp, 'out')
    car.ExtractTo(out_dir)
    out_from_dir = os.path.join(out_dir, 'from_dir')
    out_from_dir_lnk = os.path.join(out_dir, 'from_dir_lnk')
    self.assertEqual(filelist.GetFileType(out_from_dir),
                     filelist.TYPE_DIRECTORY)
    self.assertEqual(filelist.GetFileType(out_from_dir_lnk),
                     filelist.TYPE_SYMLINK_DIR)
    resolved_from_link_path = os.path.join(
        out_dir, port_symlink.ReadSymLink(out_from_dir_lnk))
    self.assertEqual(os.path.abspath(out_from_dir),
                     os.path.abspath(resolved_from_link_path))

  def testExtractFileWithLongFileName(self):
    """Tests that a long file name can be archived and extracted."""
    flist = filelist.FileList()
    tf = filelist_test.TempFileSystem()
    tf.Clear()
    tf.Make()
    self.assertTrue(os.path.exists(tf.root_in_tmp))
    suffix_path = os.path.join(
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
        'test.txt')
    input_dst = os.path.join(tf.root_in_tmp, suffix_path)
    out_dir = os.path.join(tf.root_tmp, 'out')
    output_dst = os.path.join(out_dir, suffix_path)
    _MoveFileWithLongPath(tf.test_txt, input_dst)
    self.assertTrue(_LongPathExists(input_dst))
    flist.AddFile(tf.root_in_tmp, input_dst)
    bundle_zip = os.path.join(tf.root_tmp, 'bundle.zip')
    car = cobalt_archive.CobaltArchive(bundle_zip)
    car.MakeArchive(platform_name='fake',
                    platform_sdk_version='fake_sdk',
                    config='devel',
                    file_list=flist)
    car.ExtractTo(out_dir)
    self.assertTrue(_LongPathExists(output_dst))

  @unittest.skipIf(port_symlink.IsWindows(), 'Any platform but windows.')
  def testExecutionAttribute(self):
    flist = filelist.FileList()
    tf = filelist_test.TempFileSystem()
    tf.Make()
    # The execution bit seems to turn off the read bit, so we just set the
    # read/write/execute bits for the user.
    write_flags = stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR
    os.chmod(tf.test_txt, write_flags)
    self.assertNotEqual(
        0, write_flags & cobalt_archive._GetFilePermissions(tf.test_txt))
    flist.AddFile(tf.root_tmp, tf.test_txt)
    bundle_zip = os.path.join(tf.root_tmp, 'bundle.zip')
    car = cobalt_archive.CobaltArchive(bundle_zip)
    car.MakeArchive(platform_name='fake',
                    platform_sdk_version='fake_sdk',
                    config='devel',
                    file_list=flist)
    # Now grab the json file and check that the file appears in the
    # executable_files list.
    json_str = car.ReadFile(
        '__cobalt_archive/finalize_decompression/decompress.json')
    decompress_dict = json.loads(json_str)
    executable_files = decompress_dict.get('executable_files')
    # Expect that the executable file appears in executable_files.
    self.assertTrue(executable_files)
    archive_path = os.path.relpath(tf.test_txt, tf.root_tmp)
    self.assertIn(archive_path, executable_files)
    out_dir = os.path.join(tf.root_tmp, 'out')
    car.ExtractTo(output_dir=out_dir)
    out_file = os.path.join(out_dir, tf.test_txt)
    self.assertTrue(_LongPathExists(out_file))
    perms = cobalt_archive._GetFilePermissions(out_file)
    self.assertTrue(perms & stat.S_IXUSR)
def _LongPathExists(p):
  if port_symlink.IsWindows():
    rc = _SilentCall('dir /s /b "%s"' % p)
    return rc == 0
  else:
    return os.path.isfile(p)
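# _SilentCall is referenced above but not defined in this excerpt. A minimal
# sketch of what such a helper could look like, assuming it only needs to run
# a shell command with suppressed output and return its exit code (the real
# helper may differ):
def _SilentCallSketch(shell_cmd):
  import subprocess  # local import to keep the sketch self-contained
  with open(os.devnull, 'w') as devnull:
    return subprocess.call(shell_cmd, shell=True,
                           stdout=devnull, stderr=devnull)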