def test_thin_to_fat_static(self):
  '''Combine three single-arch thin packages into one fat package and verify its contents.

  Fix: removed leftover "CACA" debug print statements.
  '''
  other_items = [
    temp_archive.item('foo.txt', content = 'foo.txt\n'),
    temp_archive.item('bar.txt', content = 'bar.txt\n'),
  ]
  i386_archive = self.__make_test_archive('lib/libsomething.a', 'i386', other_items)
  x86_64_archive = self.__make_test_archive('lib/libsomething.a', 'x86_64', other_items)
  armv7_archive = self.__make_test_archive('lib/libsomething.a', 'armv7', other_items)
  # Sanity check the thin test fixtures before combining them.
  self.assertEqual( [ 'i386' ], lipo.archs(self.__test_file('libi386.a')) )
  self.assertEqual( [ 'x86_64' ], lipo.archs(self.__test_file('libx86_64.a')) )
  self.assertEqual( [ 'armv7' ], lipo.archs(self.__test_file('libarmv7.a')) )
  thin_packages = [ i386_archive.filename, x86_64_archive.filename, armv7_archive.filename ]
  tmp_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  fat_package = path.join(tmp_dir, 'fat.tgz')
  fat_archive.thin_to_fat(thin_packages, fat_package)
  self.assertTrue( archiver.is_valid(fat_package) )
  tmp_extract_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  archiver.extract(fat_package, tmp_extract_dir)
  fat_library = path.join(tmp_extract_dir, 'lib/libsomething.a')
  self.assertTrue( path.isfile(path.join(tmp_extract_dir, 'foo.txt')) )
  self.assertTrue( path.isfile(path.join(tmp_extract_dir, 'bar.txt')) )
  self.assertTrue( path.isfile(fat_library) )
def test_sync(self):
  '''Sync a tree of files and verify the destination layout and source hard-link counts.'''
  expected_files = [
    'files',
    'files/1',
    'files/1/2',
    'files/1/2/3',
    'files/1/2/3/4',
    'files/1/2/3/4/5',
    'files/1/2/3/4/5/apple.txt',
    'files/1/2/3/4/5/kiwi.txt',
    'files/bar.txt',
    'files/empty',
    'files/foo.txt',
    'files/kiwi_link.txt',
  ]
  dst_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  src_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  tar_util.copy_tree_with_tar(self.data_dir(), src_dir)
  links_before = self.num_links(src_dir)
  sync.sync_files(src_dir, dst_dir, expected_files, 'foo')
  found = file_find.find(dst_dir, file_type = file_find.ANY)
  self.assertEqual( found, expected_files )
  links_after = self.num_links(src_dir)
  # Each synced file should have gained exactly one hard link in the source.
  self.assertEqual( links_before, [ n - 1 for n in links_after ] )
def _make_context(self, niceness_level = None, timeout = None, deleter = None):
  '''Create a test context with trash and stuff dirs rooted in one temp dir.

  Fix: file_trash was constructed with a fresh, unrelated temp dir, leaving the
  computed trash_dir unused; it now uses trash_dir as the rest of the context does.
  '''
  tmp_dir = temp_file.make_temp_dir()
  trash_dir = path.join(tmp_dir, 'trash')
  stuff_dir = path.join(tmp_dir, 'stuff')
  trash = file_trash(trash_dir, niceness_level = niceness_level, timeout = timeout, deleter = deleter)
  return self._context(trash_dir, stuff_dir, trash)
def test_install(self):
  '''install() copies a file into the destination directory.'''
  src_dir = temp_file.make_temp_dir()
  dst_dir = temp_file.make_temp_dir()
  basename = 'foo.sh'
  src_file = path.join(src_dir, basename)
  file_util.save(src_file, content='this is foo.')
  install.install(src_file, dst_dir)
  self.assertTrue(path.exists(path.join(dst_dir, basename)))
def _make_test_pm_with_am(clazz):
  '''Return a package_manager wired to a test artifact manager under a temp root.'''
  root_dir = temp_file.make_temp_dir(delete=not clazz.DEBUG)
  if clazz.DEBUG:
    print("\nroot_dir:\n", root_dir)
  artifact_manager = clazz._make_test_artifact_manager()
  return package_manager(path.join(root_dir, 'package_manager'), artifact_manager)
def test_extract_all(self):
  '''Extract a dmg and verify all files and links are present.

  Fix: removed dead "info1"/"info2" assignments -- dmg.info() was called twice
  but the results were never used or compared.
  '''
  tmp_dir = temp_file.make_temp_dir()
  dmg.extract(self.data_path('example.dmg'), tmp_dir)
  files = file_find.find(tmp_dir, relative = True, file_type = file_find.FILE_OR_LINK)
  self.assertEqual( [ 'foo.txt', 'link_to_foo.sh', 'subdir/bar.txt' ], files )
def test_thin_to_fat_bad_normals_checksums(self):
  '''thin_to_fat() must fail when non-object file contents differ between thin packages.

  Fix: the original asserted archiver.is_valid(fat_package) AFTER asserting that
  thin_to_fat() raised -- the package is never created on failure, so that
  assertion could not hold; assert the package does not exist instead.
  '''
  i386_other_items = [
    temp_archive.item('foo.txt', content = 'i386 foo.txt\n'),
    temp_archive.item('bar.txt', content = 'i386 bar.txt\n'),
  ]
  i386_archive = self.__make_test_archive('lib/libsomething.a', 'i386', i386_other_items)
  x86_64_other_items = [
    temp_archive.item('foo.txt', content = 'x86_64 foo.txt\n'),
    temp_archive.item('bar.txt', content = 'x86_64 bar.txt\n'),
  ]
  x86_64_archive = self.__make_test_archive('lib/libsomething.a', 'x86_64', x86_64_other_items)
  armv7_other_items = [
    temp_archive.item('foo.txt', content = 'armv7 foo.txt\n'),
    temp_archive.item('bar.txt', content = 'armv7 bar.txt\n'),
  ]
  armv7_archive = self.__make_test_archive('lib/libsomething.a', 'armv7', armv7_other_items)
  thin_packages = [ i386_archive.filename, x86_64_archive.filename, armv7_archive.filename ]
  tmp_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  fat_package = path.join(tmp_dir, 'fat.tgz')
  with self.assertRaises(RuntimeError) as context:
    fat_archive.thin_to_fat(thin_packages, fat_package)
  self.assertFalse( path.exists(fat_package) )
def test_move_files(self):
  '''move_files() relocates every file, leaving the source tree empty.'''
  tmp_root = temp_file.make_temp_dir()
  from_dir = path.join(tmp_root, 'src')
  to_dir = path.join(tmp_root, 'dst')
  file_util.mkdir(to_dir)
  temp_content.write_items([
    'file foo.txt "This is foo.txt\n" 644',
    'file bar.txt "This is bar.txt\n" 644',
    'file sub1/sub2/baz.txt "This is baz.txt\n" 644',
    'file yyy/zzz/vvv.txt "This is vvv.txt\n" 644',
    'file .hidden "this is .hidden\n" 644',
    'file script.sh "#!/bin/bash\necho script.sh\nexit 0\n" 755',
    'file .hushlogin "" 644',
  ], from_dir)
  expected = [
    '.hidden',
    '.hushlogin',
    'bar.txt',
    'foo.txt',
    'script.sh',
    'sub1/sub2/baz.txt',
    'yyy/zzz/vvv.txt',
  ]
  self.assertEqual( expected, file_find.find(from_dir, relative = True))
  dir_util.move_files(from_dir, to_dir)
  self.assertEqual( expected, file_find.find(to_dir, relative = True))
  self.assertEqual( [], file_find.find(from_dir, relative = True))
def test_extract_all_overlap_with_base_dir_and_strip_common_ancestor(self):
  '''Two archives with different ancestors merge cleanly under one base_dir.'''
  assert self.default_archive_type
  first_items = temp_archive.make_temp_item_list([
    ( 'base-1.2.3/foo.txt', 'foo.txt\n' ),
    ( 'base-1.2.3/subdir/bar.txt', 'bar.txt\n' ),
  ])
  second_items = temp_archive.make_temp_item_list([
    ( 'notbase-1.2.3/orange.txt', 'orange.txt\n' ),
    ( 'notbase-1.2.3/subdir/kiwi.txt', 'kiwi.txt\n' ),
  ])
  first_archive = self.make_temp_archive_for_reading(first_items)
  second_archive = self.make_temp_archive_for_reading(second_items)
  dest_dir = temp_file.make_temp_dir()
  base_dir = 'foo-6.6.6'
  first_archive.extract_all(dest_dir, base_dir = base_dir, strip_common_ancestor = True)
  second_archive.extract_all(dest_dir, base_dir = base_dir, strip_common_ancestor = True)
  self.assertEqual( [
    'foo-6.6.6/foo.txt',
    'foo-6.6.6/orange.txt',
    'foo-6.6.6/subdir/bar.txt',
    'foo-6.6.6/subdir/kiwi.txt',
  ], file_find.find(dest_dir, relative = True) )
def test_create_base_dir(self):
  '''create() with base_dir prefixes every member with that directory.'''
  self.maxDiff = None
  items = temp_archive.make_temp_item_list([
    ( 'base-1.2.3/foo.txt', 'foo.txt\n' ),
    ( 'base-1.2.3/bar.txt', 'bar.txt\n' ),
  ])
  content_dir = temp_archive.write_temp_items(items)
  base_dir = 'foo-666'
  archive = self.make_temp_archive_for_writing()
  archive.create(content_dir, base_dir = base_dir)
  self.assertTrue( path.isfile(archive.filename) )
  extract_dir = temp_file.make_temp_dir()
  archive.extract_all(extract_dir)
  # Compare the trees modulo the base_dir prefix added at create time.
  def _remove_base_dir(f):
    return file_util.remove_head(f, base_dir)
  self._compare_dirs(content_dir, extract_dir, transform = _remove_base_dir)
  file_util.remove([ content_dir, extract_dir])
def _test_extract_with_include_exclude(self, items, include, exclude):
  '''Extract a temp archive with include/exclude filters; return the resulting relative files.'''
  archive = self.make_temp_archive_for_reading(items)
  dest_dir = temp_file.make_temp_dir()
  archive.extract(dest_dir, include = include, exclude = exclude)
  found = file_find.find(dest_dir, relative = True)
  file_util.remove(dest_dir)
  return found
def test_cached_content(self):
  '''cached_content() returns the same bytes as reading the file directly.'''
  root_dir = temp_file.make_temp_dir(prefix = 'test_cached_root_', suffix = '.dir', delete = not self.DEBUG)
  source = temp_file.make_temp_file(content = 'foo\n', delete = not self.DEBUG)
  expected = file_util.read(source)
  cached = file_cache.cached_content(source, root_dir = root_dir)
  self.assertEqual( expected, cached )
def grep(clazz, tarball, pattern):
  'Return the output of ag (silver searcher) for an archive.'
  # NOTE(review): pattern is interpolated into a shell command (shell = True);
  # only safe for trusted patterns -- confirm callers never pass untrusted input.
  work_dir = temp_file.make_temp_dir()
  archiver.extract(tarball, work_dir, strip_common_ancestor = True)
  output = execute.execute('ag %s .' % (pattern), cwd = work_dir, shell = True, raise_error = False).stdout
  file_util.remove(work_dir)
  return output
def extract(self, dest_dir, base_dir = None, strip_common_ancestor = False, strip_head = None, include = None, exclude = None):
  '''Extract the dmg contents to dest_dir, honoring include/exclude filters.

  Fix: the original had two consecutive identical returns and an unreachable
  zip-based filtered-extraction path after them; the duplicate return is gone
  and the filtered path is now reachable when filters actually exclude members.
  '''
  dest_dir = self._determine_dest_dir(dest_dir, base_dir)
  filtered_members = self._filter_for_extract(self.members, include, exclude)
  if filtered_members == self.members:
    # No filtering needed; extract everything directly.
    return self.extract_all(dest_dir,
                            base_dir = base_dir,
                            strip_common_ancestor = strip_common_ancestor,
                            strip_head = strip_head)
  # Cheat by using a temporary zip file to do the actual work.  Super innefecient but
  # easy since theres no library to extract just some stuff from dmg files.
  tmp_dir = temp_file.make_temp_dir()
  dmg.extract(self.filename, tmp_dir)
  tmp_zip = temp_file.make_temp_file(suffix = '.zip')
  az = archive_zip(tmp_zip)
  az.create(tmp_dir)
  az.extract(dest_dir,
             base_dir = base_dir,
             strip_common_ancestor = strip_common_ancestor,
             strip_head = strip_head,
             include = include,
             exclude = exclude)
  file_util.remove(tmp_zip)
  file_util.remove(tmp_dir)
def test_create_from_packages(self):
  '''Create a jail from a config file and verify it contains exactly the npm package contents.'''
  tmp_dir = temp_file.make_temp_dir(delete=not self.DEBUG)
  # NOTE(review): exact whitespace of this config literal is ambiguous in the
  # original source -- confirm against the jail config parser's expectations.
  jail_config_content = '''
[jail]
description: test
packages: %s

[%s]
''' % (self.__PACKAGE_ID, self.__PACKAGE_ID)
  tmp_jail_config = temp_file.make_temp_file(content=jail_config_content)
  cmd = [
    self.__BES_JAIL_PY,
    'create',
    tmp_dir,
    tmp_jail_config,
  ]
  rv = os_env.call_python_script(cmd)
  print(rv.stdout)
  self.assertEqual(0, rv.exit_code)
  # The jail should contain exactly the files the npm package provides.
  expected_files = npm.package_contents(self.__PACKAGE_ID)
  actual_files = file_find.find(tmp_dir, file_type=file_find.ANY, relative=True)
  # npm reports absolute paths; normalize the relative results to match.
  actual_files = [path.join('/', f) for f in actual_files]
  self.assertEqual(expected_files, actual_files)
def test_extract_all_overlap(self):
  '''Two archives sharing a common ancestor extract into one merged tree.'''
  assert self.default_archive_type
  first_items = temp_archive.make_temp_item_list([
    ( 'base-1.2.3/foo.txt', 'foo.txt\n' ),
    ( 'base-1.2.3/bar.txt', 'bar.txt\n' ),
  ])
  second_items = temp_archive.make_temp_item_list([
    ( 'base-1.2.3/orange.txt', 'orange.txt\n' ),
    ( 'base-1.2.3/kiwi.txt', 'kiwi.txt\n' ),
  ])
  first_archive = self.make_temp_archive_for_reading(first_items)
  second_archive = self.make_temp_archive_for_reading(second_items)
  dest_dir = temp_file.make_temp_dir()
  first_archive.extract_all(dest_dir)
  second_archive.extract_all(dest_dir)
  self.assertEqual( [
    'base-1.2.3/bar.txt',
    'base-1.2.3/foo.txt',
    'base-1.2.3/kiwi.txt',
    'base-1.2.3/orange.txt',
  ], file_find.find(dest_dir, relative = True) )
def thin_to_fat(clazz, thin_packages_filenames, fat_package_filename, lipo_exe=None):
  '''Combine several single-arch (thin) packages into one multi-arch (fat) package.

  Raises RuntimeError if the non-object files in the thin packages differ.
  lipo_exe optionally overrides which lipo executable is used.
  '''
  tmp_extract_dir = temp_file.make_temp_dir()
  thin_packages = clazz.__load_thin_packages(thin_packages_filenames, tmp_extract_dir, lipo_exe=lipo_exe)
  # Check that the non object files in all thin packages are the same
  for thin_package in thin_packages:
    others = [p for p in thin_packages if p != thin_package]
    success, failed_package = thin_package.compare_normals(others)
    if not success:
      raise RuntimeError('The content of non object files in %s and %s does not match.' % (thin_package.filename, failed_package.filename))
  # Collect the objects into a dictionary keyed by the arcname in the final fat package
  files = {}
  for thin_package in thin_packages:
    for obj in thin_package.members.objects:
      if obj.member not in files:
        files[obj.member] = []
      files[obj.member].append(obj)
  # Fuse each group of same-named thin objects into one fat object with lipo.
  tmp_repack_dir = temp_file.make_temp_dir()
  for arcname, thin_objects in files.items():
    fat_object_filename = path.join(tmp_repack_dir, arcname)
    thin_objects_filenames = [obj.filename for obj in thin_objects]
    file_util.mkdir(path.dirname(fat_object_filename))
    lipo.thin_to_fat(thin_objects_filenames, fat_object_filename, lipo_exe=lipo_exe)
  # Extract the normal non object files
  for thin_package in thin_packages:
    members = [ member_info.member for member_info in thin_package.members.normals ]
    archiver.extract(thin_package.filename, members, tmp_repack_dir)
  # Re-pack the final fat archive
  archiver.create(fat_package_filename, tmp_repack_dir)
def _make_test_tm(self):
  '''Return (tools_manager, artifact_manager, amt) backed by a published knife recipe.'''
  amt = AMT(recipes=RECIPES.KNIFE, debug=self.DEBUG)
  amt.publish('knife;6.6.6;0;0;linux;release;x86_64;ubuntu;18')
  tools_dir = path.join(temp_file.make_temp_dir(delete=not self.DEBUG), 'tools')
  if self.DEBUG:
    print('\ntools_manager dir:\n%s' % (tools_dir))
  return TM(tools_dir, self.TEST_BUILD_TARGET, amt.am), amt.am, amt
def _make_empty_pm(clazz):
  '''Return a package_manager backed by a fresh, empty local artifact manager.'''
  root_dir = temp_file.make_temp_dir(delete=not clazz.DEBUG)
  if clazz.DEBUG:
    print("root_dir:\n%s\n" % (root_dir))
  artifact_manager = artifact_manager_local(path.join(root_dir, 'artifact_manager'))
  return package_manager(path.join(root_dir, 'package_manager'), artifact_manager)
def __make_tmp_files(self):
  '''Create a temp dir populated with a few text and image placeholder files.'''
  tmp_dir = temp_file.make_temp_dir()
  for name in [ 'foo.txt', 'bar.txt', 'kiwi.jpg', 'kiwi.png', 'orange.png' ]:
    file_util.save(path.join(tmp_dir, name), content = '%s\n' % (name))
  return tmp_dir
def _command_test(self, bt, package_tarball, test, artifacts_dir, tools_dir, tmp_dir, opts, verbose):
  '''Run the test script for a package tarball.  Returns 0 on success, 1 on failure.

  Fixes:
  - "bt == opts['build_level']" compared instead of assigned; now assigns.
  - "tmp_dir = None" clobbered the caller-provided tmp_dir, making the
    "if tmp_dir" branch dead; the argument is now honored and a temp dir is
    only created when none was given.
  - Removed unused locals (test_dir, source_dir) and commented-out code.
  '''
  parsed_opts = key_value_parser.parse_to_dict(opts)
  opts = parsed_opts
  # Allow the build level to come from opts when the caller used the default.
  if 'build_level' in opts and bt == build_target.DEFAULT:
    bt = opts['build_level']
  bt = build_level.parse_level(bt)
  opts['build_level'] = bt
  build_blurb.set_process_name('package_tester')
  build_blurb.set_verbose(bool(verbose))
  if not path.isfile(test):
    raise RuntimeError('Test not found: %s' % (test))
  if not tmp_dir:
    tmp_dir = temp_file.make_temp_dir(delete=False)
  file_util.mkdir(tmp_dir)
  if not path.isdir(artifacts_dir):
    raise RuntimeError('Not an artifacts directory: %s' % (artifacts_dir))
  if not path.isdir(tools_dir):
    raise RuntimeError('Not an tools directory: %s' % (tools_dir))
  am = artifact_manager_local(artifacts_dir)
  tm = tools_manager(tools_dir, self.build_target, am)
  build_blurb.blurb('tester', ' build_target: %s' % (str(self.build_target)))
  build_blurb.blurb('tester', ' tmp_dir: %s' % (tmp_dir))
  build_blurb.blurb('tester', 'artifacts_dir: %s' % (artifacts_dir))
  if not package.is_package(package_tarball):
    raise RuntimeError('Not a valid package: %s' % (package_tarball))
  test_config = package_tester.test_config(None, package_tarball, am, tm, [])
  tester = package_tester(test_config, test)
  result = tester.run()
  if not result.success:
    print("result: ", result)
    return 1
  return 0
def test_extract_all(self):
  '''extract_all() writes every member to the destination dir.'''
  assert self.default_archive_type
  archive = self.make_temp_archive_for_reading(temp_archive.make_temp_item_list([
    ( 'foo.txt', 'foo.txt\n' ),
  ]))
  dest_dir = temp_file.make_temp_dir()
  archive.extract_all(dest_dir)
  self.assertTrue( path.isfile(path.join(dest_dir, 'foo.txt')) )
def extract_member_to_file(self, member, filename):
  '''Extract a single member from the archive and rename it to filename.

  Raises RuntimeError if the member is missing or is not a regular file.
  '''
  staging_dir = temp_file.make_temp_dir()
  self.extract(staging_dir, include = [ member ])
  extracted = path.join(staging_dir, member)
  if not path.exists(extracted):
    raise RuntimeError('Failed to extract member: %s' % (member))
  if not path.isfile(extracted):
    raise RuntimeError('Member is not a file: %s' % (member))
  file_util.rename(extracted, filename)
def make_temp_repo(clazz, address = None, content = None, delete = True):
  '''Create a temp git repo, optionally seeded and committed with content.'''
  r = repo(temp_file.make_temp_dir(delete = delete), address = address)
  r.init()
  if content:
    r.write_temp_content(content)
    r.add('.')
    r.commit('add temp content', '.')
  return r
def make_temp_repo(clazz, content = None):
  '''Create a temp git repo with no remote, optionally seeded and committed with content.'''
  r = repo(temp_file.make_temp_dir(), address = None)
  r.init()
  if content:
    check.check_string_seq(content)
    r.write_temp_content(content)
    r.add('.')
    r.commit('add temp repo content', '.')
  return r
def save(self):
  'Save the db from its source.'
  # Serialize to a local temp file, then upload to pcloud at the remote db path.
  json_text = self._dict_db.to_json()
  staging_dir = temp_file.make_temp_dir()
  local_path = path.join(staging_dir, self.DB_FILENAME)
  file_util.save(local_path, content=json_text)
  self._pcloud.upload_file(local_path,
                           path.basename(self._remote_db_path),
                           folder_path=path.dirname(self._remote_db_path))
def _test_create_with_include_exclude(self, items, include, exclude):
  '''Create an archive from items with include/exclude filters; return extracted relative files.'''
  content_dir = temp_archive.write_temp_items(items)
  archive = self.make_temp_archive_for_writing()
  archive.create(content_dir, include = include, exclude = exclude)
  self.assertTrue( path.isfile(archive.filename) )
  extract_dir = temp_file.make_temp_dir()
  archive.extract_all(extract_dir)
  found = file_find.find(extract_dir, relative = True)
  file_util.remove([ content_dir, extract_dir])
  return found
def _make_temp_archive_dmg(clazz, items, filename, mode):
  '''Create a dmg archive at filename containing the given temp items.

  Fix: removed "tmp_dmg" -- a temp file was created and leaked without ever
  being used.  mode is accepted for interface parity with the other archive
  makers but dmg creation does not use it.
  '''
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    assert item
    assert item.arcname
    file_util.save(path.join(tmp_dir, item.arcname), content = item.content)
  cmd = 'hdiutil create -srcfolder %s -ov -format UDZO %s' % (tmp_dir, filename)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
def autoconf_help(clazz, tarball):
  'Return the output of configure --help for an autoconf archive.'
  # Fixes: misspelled local "confiugure_path"; "help" shadowed the builtin.
  tmp_dir = temp_file.make_temp_dir()
  archiver.extract(tarball, tmp_dir, strip_common_ancestor = True)
  configure_path = path.join(tmp_dir, 'configure')
  if not path.exists(configure_path):
    raise RuntimeError('No configure script found in %s' % (tarball))
  help_text = execute.execute('./configure --help', cwd = tmp_dir, shell = True, raise_error = False).stdout
  file_util.remove(tmp_dir)
  return help_text
def create(self, root_dir, base_dir = None, extra_items = None, include = None, exclude = None):
  '''Create a dmg from the files found under root_dir.'''
  self._pre_create()
  found_items = self._find(root_dir, base_dir, extra_items, include, exclude)
  # Stage the items into a temp dir so hdiutil can pack it as a single folder.
  staging_dir = temp_file.make_temp_dir()
  for found in found_items:
    file_util.copy(found.filename, path.join(staging_dir, found.arcname))
  execute.execute('hdiutil create -srcfolder %s -ov -format UDZO %s' % (staging_dir, self.filename))
  file_util.remove(staging_dir)
def test_extract_thin(self):
  '''Extracting a thin archive yields its object files.'''
  dest_dir = temp_file.make_temp_dir()
  ar_replacement.extract(self._test_file('thin_fruits_x86_64.a'), dest_dir)
  found = self._filter_contents(dir_util.list(dest_dir, relative=True))
  self.assertEqual([ 'thin_cherry.o', 'thin_kiwi.o' ], found)
def __init__(self, root_dir=None, debug=False, recipes=None, filename=None):
  '''Initialize a test artifact-manager helper rooted at root_dir (temp dir if omitted).'''
  root_dir = root_dir if root_dir else temp_file.make_temp_dir(suffix='.artifacts')
  self._debug = debug
  self.am = artifact_manager_local(root_dir)
  self._recipes = {}
  if recipes:
    self.add_recipes(recipes, filename=filename)
def test_cached_filename(self):
  '''cached_filename() returns a distinct path whose content matches the source file.'''
  root_dir = temp_file.make_temp_dir(prefix = 'test_cached_root_', suffix = '.dir', delete = not self.DEBUG)
  source = temp_file.make_temp_file(content = 'foo\n', delete = not self.DEBUG)
  if self.DEBUG:
    print("\ntmp_root_dir: ", root_dir)
    print("tmp_filename: ", source)
  expected = file_util.read(source)
  cached = file_cache.cached_filename(source, root_dir = root_dir)
  self.assertEqual( expected, file_util.read(cached) )
  self.assertNotEqual( source, cached )
def test_extract_all_with_base_dir_and_strip_common_ancestor(self):
  '''Strip the archive's common ancestor, then re-root members under base_dir.'''
  assert self.default_archive_type
  stripped_dir = 'base-1.2.3'
  added_dir = 'added-6.6.6'
  items = temp_archive.add_base_dir(temp_archive.make_temp_item_list([
    ( 'foo.txt', 'foo.txt\n' ),
  ]), stripped_dir)
  archive = self.make_temp_archive_for_reading(items)
  dest_dir = temp_file.make_temp_dir()
  archive.extract_all(dest_dir, base_dir = added_dir, strip_common_ancestor = True)
  self.assertTrue( path.isfile(path.join(dest_dir, added_dir, 'foo.txt')) )
def test_replace_fat(self):
  '''replace() builds an archive containing exactly the given fat objects.'''
  wanted = [ 'fat_cherry.o', 'fat_kiwi.o' ]
  work_dir = temp_file.make_temp_dir()
  archive = path.join(work_dir, 'fat_fruits.a')
  ar_replacement.replace(archive, [ self._test_file(o) for o in wanted ])
  found = self._filter_contents(ar_replacement.contents(archive))
  self.assertEqual(wanted, found)
def _make_temp_archive_xz(clazz, items, filename, mode):
  '''Create an xz (tar J) archive at filename containing the given temp items.

  Fix: removed "tmp_xz" -- a temp file was created and leaked without ever
  being used.  mode is accepted for interface parity with the other archive makers.
  '''
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    assert item
    assert item.arcname
    file_util.save(path.join(tmp_dir, item.arcname), content = item.content)
  manifest_content = '\n'.join([ item.arcname for item in items ])
  manifest = temp_file.make_temp_file(content = manifest_content)
  cmd = 'tar Jcf %s -C %s -T %s' % (filename, tmp_dir, manifest)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
def _load_builder_script(self, filename):
  '''Load a builder script file and return the single script it defines.

  Fix: dict.values() is a view in python 3 and does not support indexing;
  wrap it in list() before taking the first element.
  '''
  bt = build_target()
  config = builder_config()
  config.build_root = temp_file.make_temp_dir()
  config.no_network = True
  config.no_checksums = True
  config.source_dir = path.dirname(filename)
  config.verbose = True
  env = builder_env(config, [filename])
  sm = builder_script_manager([filename], bt, env)
  self.assertEqual(1, len(sm.scripts))
  return list(sm.scripts.values())[0]
def test_make_temp_archive_from_file(self):
  '''An item backed by a file produces a valid tar archive with that file's content.'''
  src_file = temp_file.make_temp_file(content = 'foo.txt\n', suffix = '.foo.txt')
  made = self._make_temp_archive('tgz', items = [ temp_archive.item('foo.txt', filename = src_file) ])
  self.assertTrue( path.isfile(made.filename) )
  self.assertTrue( tarfile.is_tarfile(made.filename) )
  self.assertFalse( zipfile.is_zipfile(made.filename) )
  extract_dir = temp_file.make_temp_dir()
  with tarfile.open(made.filename, mode = 'r') as archive:
    archive.extractall(path = extract_dir)
  member_path = path.join(extract_dir, 'foo.txt')
  self.assertTrue( path.isfile(member_path) )
  self.assertEqual( b'foo.txt\n', file_util.read(member_path) )
def test_clone_or_pull(self):
  '''clone() copies a repo's committed files into the clone directory.'''
  src_repo = self._create_tmp_repo()
  new_files = self._create_tmp_files(src_repo)
  git.add(src_repo, new_files)
  git.commit(src_repo, 'nomsg\n', '.')
  clone_dir = temp_file.make_temp_dir()
  git.clone(src_repo, clone_dir)
  for f in new_files:
    self.assertTrue( path.exists(path.join(clone_dir, path.basename(f))) )
def create(self, root_dir, base_dir = None, extra_items = None, include = None, exclude = None):
  '''Create an xz archive from the files found under root_dir.'''
  self._pre_create()
  found = self._find(root_dir, base_dir, extra_items, include, exclude)
  # Stage the items so tar packs them under their final arcnames.
  staging_dir = temp_file.make_temp_dir()
  for item in found:
    file_util.copy(item.filename, path.join(staging_dir, item.arcname))
  manifest = temp_file.make_temp_file(content = '\n'.join([ item.arcname for item in found ]))
  execute.execute('tar Jcf %s -C %s -T %s' % (self.filename, staging_dir, manifest))
  file_util.remove(staging_dir)
def test_copy_tree_with_tar(self):
  '''copy_tree_with_tar() reproduces a full tree including links and empty files.'''
  self.maxDiff = None
  src_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  dst_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  with tarfile.open(self.data_path('test.tar'), mode = 'r') as f:
    f.extractall(path = src_dir)
  tar_util.copy_tree_with_tar(src_dir, dst_dir)
  self.assertEqual( [
    '1',
    '1/2',
    '1/2/3',
    '1/2/3/4',
    '1/2/3/4/5',
    '1/2/3/4/5/apple.txt',
    '1/2/3/4/5/kiwi.txt',
    'bar.txt',
    'empty',
    'foo.txt',
    'kiwi_link.txt',
  ], file_find.find(dst_dir, file_type = file_find.ANY) )
def test_fat_to_thin(self):
  '''fat_to_thin() extracts each arch without modifying the source fat archive.

  Fix: the fat archive's baseline checksum is loop-invariant; hoisted it out
  of the loop instead of recomputing it before every iteration.
  '''
  tmp_dir = temp_file.make_temp_dir()
  fat_archive = self._test_file('fat_fruits.a')
  fat_checksum_before = file_checksum.file_checksum(fat_archive, 'sha256')
  for arch in lipo.POSSIBLE_ARCHS:
    thin_archive = path.join(tmp_dir, '%s_fruits.a' % (arch))
    lipo.fat_to_thin(fat_archive, thin_archive, arch)
    self.assertTrue(path.exists(thin_archive))
    # The source fat archive must be untouched by each extraction.
    self.assertEqual(fat_checksum_before, file_checksum.file_checksum(fat_archive, 'sha256'))
    self.assertEqual([arch], self._archs(thin_archive))
def _test_extract_with_members(self, items, members, base_dir = None, strip_common_ancestor = False, strip_head = None):
  '''Extract only the given members from a temp archive; return the resulting relative files.'''
  archive = self.make_temp_archive_for_reading(items)
  dest_dir = temp_file.make_temp_dir()
  archive.extract(dest_dir,
                  base_dir = base_dir,
                  strip_common_ancestor = strip_common_ancestor,
                  strip_head = strip_head,
                  include = members)
  found = file_find.find(dest_dir, relative = True)
  file_util.remove(dest_dir)
  return found
def _contents_darwin_fat(clazz, archive, tools):
  'Return the archive contents for a fat archive on darwin.'
  # Split into per-arch thin libs and require every arch to report the same contents.
  tmp_dir = temp_file.make_temp_dir()
  thin_libs = clazz._fat_to_thin(archive, tmp_dir, tools)
  expected = None
  for arch, lib in thin_libs:
    contents = clazz._contents_with_ar(lib, tools)
    if expected is None:
      expected = contents
      assert expected
    elif contents != expected:
      raise RuntimeError('Unexpected contents for thin lib: %s' % (lib))
  return expected
def test_update_egg_directory(self):
  '''update_egg_directory() writes an easy-install.pth listing every egg.'''
  egg_dir = temp_file.make_temp_dir()
  eggs = [
    'foo-1.2.3-py2.7.egg',
    'bar-6.6.6-py2.7.egg',
    'baz-10.11.12-py2.7.egg',
  ]
  for egg in eggs:
    file_util.save(path.join(egg_dir, egg), content = '%s\n' % (egg))
  setup_tools.update_egg_directory(egg_dir)
  pth_path = path.join(egg_dir, setup_tools.EASY_INSTALL_DOT_PTH_FILENAME)
  listed = setup_tools.read_easy_install_pth(pth_path)
  self.assertEqual( sorted(eggs), sorted(listed) )
def _extract_darwin_fat(clazz, archive, dest_dir, tools):
  'Extract fat archive.'
  # Split the fat archive into per-arch thin libs, extract each with ar and
  # prefix each object file with its arch to avoid name collisions.
  # Fix: removed dead "expected_contents = None" (never used here).
  tmp_dir = temp_file.make_temp_dir()
  thin_libs = clazz._fat_to_thin(archive, tmp_dir, tools)
  file_util.mkdir(dest_dir)
  for arch, lib in thin_libs:
    objects_dir = lib + '.objdir'
    clazz._extract_with_ar(lib, objects_dir, tools)
    files = dir_util.list(objects_dir, relative = True)
    for f in files:
      src = path.join(objects_dir, f)
      dst_basename = arch + '_' + f
      dst = path.join(dest_dir, dst_basename)
      shutil.move(src, dst)
def extract_members(self, members, dest_dir, base_dir = None, strip_common_ancestor = False, strip_head = None, include = None, exclude = None):
  '''Extract specific members from the dmg.

  Cheat by using a temporary zip file to do the actual work.  Super innefecient
  but easy since theres no library to extract just some stuff from dmg files.
  '''
  staging_dir = temp_file.make_temp_dir()
  dmg.extract(self.filename, staging_dir)
  zip_filename = temp_file.make_temp_file(suffix = '.zip')
  zip_archive = archive_zip(zip_filename)
  zip_archive.create(staging_dir)
  zip_archive.extract_members(members, dest_dir,
                              base_dir = base_dir,
                              strip_common_ancestor = strip_common_ancestor,
                              strip_head = strip_head,
                              include = include,
                              exclude = exclude)
  file_util.remove(zip_filename)
  file_util.remove(staging_dir)
def create(self, root_dir, base_dir = None, extra_items = None, include = None, exclude = None):
  '''Create an xz archive from the files found under root_dir.

  Fixes: removed unused "ext"/"mode" locals and the commented-out debug prints.
  '''
  items = self._find(root_dir, base_dir, extra_items, include, exclude)
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    file_util.copy(item.filename, path.join(tmp_dir, item.arcname))
  manifest_content = '\n'.join([ item.arcname for item in items ])
  manifest = temp_file.make_temp_file(content = manifest_content)
  cmd = 'tar Jcf %s -C %s -T %s' % (self.filename, tmp_dir, manifest)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
def test_save_executable(self):
  '''save() substitutes variables and writes an executable (0755) script.'''
  script = manager_script('#!/bin/bash\necho @FOO@ @BAR@', 'foo.sh')
  root_dir = temp_file.make_temp_dir()
  script.save(root_dir, { '@FOO@': 'foo', '@BAR@': 'bar' })
  saved = path.join(root_dir, 'foo.sh')
  self.assertTrue(path.exists(saved))
  self.assertEqual('#!/bin/bash\necho foo bar', file_util.read(saved, 'utf-8'))
  self.assertEqual(file_util.mode(saved), 0o755)
def test_patch(self):
  '''patch() applies a diff with backup and leaves the original saved as .orig.'''
  patch_file = self.data_path('src_to_dst.patch')
  src = self.data_path('src.txt')
  dst = self.data_path('dst.txt')
  work_dir = temp_file.make_temp_dir()
  work_src = path.join(work_dir, 'src.txt')
  file_util.copy(src, work_src)
  patch.patch(patch_file, cwd=work_dir, strip=0, backup=True, posix=True)
  self.assertEqual(file_util.read(dst), file_util.read(work_src))
  backup = work_src + '.orig'
  self.assertTrue(path.exists(backup))
  self.assertEqual(file_util.read(src), file_util.read(backup))
def get_tarball(self, address, revision):
  'Return the local filesystem path to the tarball with address and revision.'
  local_address_path = self.path_for_address(address)
  tarball_filename = '%s.tar.gz' % (revision)
  tarball_path = path.join(local_address_path, tarball_filename)
  # Cache hit: already downloaded.
  if path.exists(tarball_path):
    return tarball_path
  # Cache miss: download into a temp dir, then move into place.
  if path.isdir(address):
    name = path.basename(address)
  else:
    name = git_util.name_from_address(address)
  download_dir = temp_file.make_temp_dir()
  downloaded = path.join(download_dir, tarball_filename)
  git.download_tarball(name, revision, address, downloaded)
  file_util.rename(downloaded, tarball_path)
  return tarball_path
def make_test_package(clazz, pm, debug = False):
  '''Create a test package tarball for the given package metadata pm.

  Fix: renamed misspelled local "pkg_config_pc_contnet".
  '''
  pkg_config_pc_content = clazz.make_pkg_config_pc_content(pm.name, pm.build_version)
  script_content = '#!/bin/bash\necho %s-%s\nexit 0\n' % (pm.name, pm.build_version)
  name = pm.name.replace('_conflict', '')
  items = [
    clazz.make_temp_item(name, pm.version, '_foo.txt', 'docs'),
    clazz.make_temp_item(name, pm.version, '_bar.txt', 'docs'),
    clazz.make_temp_item(name, pm.version, '_script.sh', 'bin', content = script_content, mode = 0o755),
    temp_item('lib/pkgconfig/%s.pc' % (name), content = pkg_config_pc_content),
  ]
  tmp_stage_dir = temp_file.make_temp_dir(delete = not debug)
  tmp_stage_files_dir = path.join(tmp_stage_dir, 'files')
  temp_file.write_temp_files(tmp_stage_files_dir, items)
  tmp_tarball = temp_file.make_temp_file(prefix = pm.package_descriptor.full_name, suffix = '.tar.gz', delete = not debug)
  package.create_package(tmp_tarball, pm.package_descriptor, pm.build_target, tmp_stage_dir)
  return clazz.test_package(tmp_tarball, pm)
def test_extract_fat(self):
  '''Extracting a fat archive yields arch-prefixed object files.'''
  dest_dir = temp_file.make_temp_dir()
  ar_replacement.extract(self._test_file('fat_fruits.a'), dest_dir)
  found = self._filter_contents(dir_util.list(dest_dir, relative=True))
  self.assertEqual([
    'arm64_fat_cherry.o',
    'arm64_fat_kiwi.o',
    'armv7_fat_cherry.o',
    'armv7_fat_kiwi.o',
    'i386_fat_cherry.o',
    'i386_fat_kiwi.o',
    'x86_64_fat_cherry.o',
    'x86_64_fat_kiwi.o',
  ], found)
def make_artifact_manager(clazz, debug=False, recipes=None, build_target=None, mutations=None):
  '''Create a local artifact manager, optionally publishing fake packages from recipes.'''
  root_dir = temp_file.make_temp_dir(delete=not debug)
  if debug:
    print("root_dir:\n%s\n" % (root_dir))
  am = artifact_manager_local(root_dir)
  if not recipes:
    return am
  mutations = mutations or {}
  check.check_build_target(build_target)
  for pkg in fake_package_unit_test.create_many_packages(recipes, mutations):
    am.publish(pkg, build_target, False, None)
  return am
def test_thin_to_fat(self):
  '''thin_to_fat() builds a fat archive without modifying the thin inputs.'''
  work_dir = temp_file.make_temp_dir()
  thin_archives = [ self._test_file('lib%s.a' % (arch)) for arch in lipo.POSSIBLE_ARCHS ]
  fat_archive = path.join(work_dir, 'tmp_fat_fruits.a')
  checksums_before = [ file_checksum.file_checksum(a, 'sha256') for a in thin_archives ]
  lipo.thin_to_fat(thin_archives, fat_archive)
  checksums_after = [ file_checksum.file_checksum(a, 'sha256') for a in thin_archives ]
  self.assertTrue(path.exists(fat_archive))
  self.assertEqual(checksums_before, checksums_after)
  self.assertEqual(lipo.POSSIBLE_ARCHS, self._archs(fat_archive))
def _process_file(self, filename):
  '''Update a recipe's tarball_address revision to the latest upstream commit.

  Clones the recipe's tarball address, reads its latest short commit hash and
  rewrites the recipe file in place when the revision changed.  Returns 0.
  Raises IOError if filename is not a file.
  '''
  if not path.isfile(filename):
    raise IOError('Not a file: %s' % (filename))
  env = testing_recipe_load_env()
  recipes = builder_recipe_loader.load(env, filename)
  for recipe in recipes:
    # Only the first step's values are consulted for tarball_address.
    values = recipe.steps[0].resolve_values({}, env)
    tarball_address = values.get('tarball_address')
    if tarball_address:
      tarball_address_address = tarball_address.address
      old_revision = tarball_address.revision
      # Clone into a temp dir to learn the latest commit hash.
      gr = repo(temp_file.make_temp_dir(), address = tarball_address_address)
      gr.clone()
      new_revision = gr.last_commit_hash(short_hash = True)
      if old_revision == new_revision:
        return 0
      replacements = { old_revision: new_revision }
      print('%s: update %s -> %s' % (filename, old_revision, new_revision))
      # word_boundary avoids replacing the old hash inside longer tokens.
      file_replace.replace(filename, replacements, backup = False, word_boundary = True)
  return 0
def extract(self, root_dir, stuff_dir_basename, env_dir_basename):
  '''Extract this package's payload into root_dir.

  Files from the package's FILES_DIR go to root_dir/stuff_dir_basename and
  env scripts from ENV_DIR go to root_dir/env_dir_basename, with install
  hooks run after each move.
  '''
  # Stage inside root_dir so the final moves stay on the same filesystem.
  tmp_dir = temp_file.make_temp_dir(prefix='package.extract.', suffix='.dir', dir=root_dir)
  dst_stuff_dir = path.join(root_dir, stuff_dir_basename)
  dst_env_dir = path.join(root_dir, env_dir_basename)
  file_util.mkdir(dst_stuff_dir)
  file_util.mkdir(dst_env_dir)
  # tar cmd is 10x faster than archiver. need to fix archiver
  tar_cmd = ['tar', 'xf', self.tarball, '-C', tmp_dir]
  execute.execute(tar_cmd)
  #archiver.extract_all(self.tarball, tmp_dir)
  src_stuff_dir = path.join(tmp_dir, self.FILES_DIR)
  src_env_dir = path.join(tmp_dir, self.ENV_DIR)
  if path.isdir(src_stuff_dir):
    dir_util.move_files(src_stuff_dir, dst_stuff_dir)
  # NOTE(review): hook nesting is ambiguous in the original source -- these are
  # run unconditionally here; confirm they should not be inside the isdir guards.
  self._post_install_hooks(dst_stuff_dir)
  if path.isdir(src_env_dir):
    dir_util.move_files(src_env_dir, dst_env_dir)
  self._variable_substitution_hook(dst_env_dir, dst_stuff_dir)
  file_util.remove(tmp_dir)
def test_replace_add_thin(self):
  '''replace() can add a new object to an existing thin archive.'''
  work_dir = temp_file.make_temp_dir()
  archive = path.join(work_dir, 'thin_fruits.a')
  initial = [ 'thin_cherry.o', 'thin_kiwi.o' ]
  ar_replacement.replace(archive, [ self._test_file(o) for o in initial ])
  found = self._filter_contents(ar_replacement.contents(archive))
  self.assertEqual(initial, found)
  # A second replace with a new object should append, not clobber.
  ar_replacement.replace(archive, [ self._test_file('thin_x86_64_avocado.o') ])
  found = self._filter_contents(ar_replacement.contents(archive))
  self.assertEqual([ 'thin_cherry.o', 'thin_kiwi.o', 'thin_x86_64_avocado.o' ], found)
def test_clone_or_pull(self):
  '''clone_or_pull() clones a repo, and pull() picks up later commits.'''
  upstream = temp_git_repo.make_temp_repo()
  upstream.write_temp_content([
    'file a/b/c/foo.txt "foo content" 755',
    'file d/e/bar.txt "bar content" 644',
    'dir baz "" 700',
  ])
  upstream.add('.')
  upstream.commit('foo', '.')
  downstream = repo(temp_file.make_temp_dir(), address = upstream.root)
  downstream.clone_or_pull()
  self.assertEqual([ 'a/b/c/foo.txt', 'd/e/bar.txt'], downstream.find_all_files() )
  upstream.write_temp_content([
    'file kiwi.txt "kiwi" 644',
  ])
  upstream.add('kiwi.txt')
  upstream.commit('foo', 'kiwi.txt')
  downstream.pull()
  self.assertEqual([ 'a/b/c/foo.txt', 'd/e/bar.txt', 'kiwi.txt' ], downstream.find_all_files() )
def make_template_tarball(root, template_name, template_version):
  '''Build an autotools dist tarball from the template project and copy it into root.

  Copies root/template into a temp dir, runs automake/autoconf/configure/make dist
  inside root/template's copy and copies the resulting tarball back to root.
  Returns the path to the tarball in root.
  '''
  tmp_dir = temp_file.make_temp_dir(delete = not DEBUG)
  if DEBUG:
    print('DEBUG2: tmp_dir=%s' % (tmp_dir))
  full_name = '%s-%s' % (template_name, template_version)
  template_dir = path.join(root, 'template')
  tar_util.copy_tree_with_tar(template_dir, tmp_dir)
  working_dir = path.join(tmp_dir, full_name)
  tarball_filename = '%s.tar.gz' % (full_name)
  # The whole autotools pipeline runs as one shell command so each step
  # executes in working_dir and stops on the first failure (&&).
  command = [
    'cd %s' % (working_dir),
    'automake -a',
    'autoconf',
    './configure',
    'make dist',
    'cp %s %s' % (tarball_filename, root),
  ]
  env = os_env.make_clean_env(keep_keys = [ 'PATH', 'PKG_CONFIG_PATH' ])
  # GZIP=-n: omit timestamp/name from the gzip header for reproducible output.
  env['GZIP'] = '-n'
  execute.execute(' && '.join(command), shell = True, non_blocking = True, env = env)
  result = path.join(root, '%s-%s.tar.gz' % (template_name, template_version))
  assert path.isfile(result)
  return result