def test_move_files(self):
  'dir_util.move_files() should empty the source dir and recreate the same relative layout in the destination.'
  work_dir = temp_file.make_temp_dir()
  from_dir = path.join(work_dir, 'src')
  to_dir = path.join(work_dir, 'dst')
  file_util.mkdir(to_dir)
  items = [
    'file foo.txt "This is foo.txt\n" 644',
    'file bar.txt "This is bar.txt\n" 644',
    'file sub1/sub2/baz.txt "This is baz.txt\n" 644',
    'file yyy/zzz/vvv.txt "This is vvv.txt\n" 644',
    'file .hidden "this is .hidden\n" 644',
    'file script.sh "#!/bin/bash\necho script.sh\nexit 0\n" 755',
    'file .hushlogin "" 644',
  ]
  temp_content.write_items(items, from_dir)
  expected = [
    '.hidden',
    '.hushlogin',
    'bar.txt',
    'foo.txt',
    'script.sh',
    'sub1/sub2/baz.txt',
    'yyy/zzz/vvv.txt',
  ]
  # Everything starts in the source dir...
  self.assertEqual( expected, file_find.find(from_dir, relative = True) )
  dir_util.move_files(from_dir, to_dir)
  # ...and ends up in the destination, leaving the source empty.
  self.assertEqual( expected, file_find.find(to_dir, relative = True) )
  self.assertEqual( [], file_find.find(from_dir, relative = True) )
def create_package(clazz, tarball_path, pkg_desc, build_target, stage_dir, timer=None):
  # Assemble a package tarball from a populated stage_dir and return the
  # value of clazz._create_package_result(tarball_path, metadata).
  #
  # stage_dir is expected to contain a 'files' subdir and optionally an
  # 'env' subdir; checksums for both sets are recorded in a metadata file
  # written at clazz.METADATA_FILENAME inside stage_dir before archiving.
  # `timer` is optional profiling support; a disabled timer is used when
  # none is given.
  timer = timer or debug_timer('package', disabled=True)
  properties = dict_util.filter_without_keys(pkg_desc.properties, ['export_compilation_flags_requirements'])
  # Hack the export_compilation_flags_requirements property to be a plain
  # string list instead of the masked config it is
  key = 'export_compilation_flags_requirements'
  if key in pkg_desc.properties:
    properties[key] = [str(x) for x in pkg_desc.properties[key]]
  files_dir = path.join(stage_dir, 'files')
  timer.start('create_package - find files')
  files = file_find.find(files_dir, relative=True, file_type=file_find.FILE | file_find.LINK)
  timer.stop()
  timer.start('create_package - files checksums')
  files_checksum_list = file_checksum_list.from_files(files, root_dir=files_dir)
  timer.stop()
  env_files_dir = path.join(stage_dir, 'env')
  timer.start('create_package - find env_files')
  # The env dir is optional; a missing dir means no env files.
  if path.isdir(env_files_dir):
    env_files = file_find.find(env_files_dir, relative=True, file_type=file_find.FILE | file_find.LINK)
  else:
    env_files = []
  timer.stop()
  timer.start('create_package - env_files checksums')
  env_files_checksum_list = file_checksum_list.from_files(env_files, root_dir=env_files_dir)
  timer.stop()
  pkg_files = package_files(files_checksum_list, env_files_checksum_list)
  metadata = package_metadata('',
                              pkg_desc.name,
                              pkg_desc.version.upstream_version,
                              pkg_desc.version.revision,
                              pkg_desc.version.epoch,
                              build_target.system,
                              build_target.level,
                              build_target.arch,
                              build_target.distro,
                              build_target.distro_version,
                              pkg_desc.requirements,
                              properties,
                              pkg_files)
  metadata_filename = path.join(stage_dir, clazz.METADATA_FILENAME)
  file_util.save(metadata_filename, content=metadata.to_json())
  clazz._create_package(tarball_path, stage_dir, timer)
  return clazz._create_package_result(tarball_path, metadata)
def _compare_dirs(self, expected_dir, actual_dir, transform = None):
  'Assert both directories contain the same relative file list; `transform` optionally maps each actual name first.'
  # FIXME: How to pop this state ?
  self.maxDiff = None
  want = file_find.find(expected_dir, relative = True)
  got = file_find.find(actual_dir, relative = True)
  if transform:
    got = [ transform(name) for name in got ]
  self.assertEqual( want, got )
def test_installed_files_only_env_files(self):
  # A package shipping only env files should install those env files plus
  # the package db and the framework env -- and nothing under stuff/.
  #
  # NOTE(review): the recipe's line layout was reconstructed from a
  # whitespace-mangled source; verify indentation against other
  # fake_package recipes in the tree.
  recipe = '''
fake_package files 1.0.0 0 0 linux release x86_64 ubuntu 18
  env_files
    foo.sh
      \#@REBUILD_HEAD@
      export FOO=foo
      \#@REBUILD_TAIL@
    bar.sh
      \#@REBUILD_HEAD@
      export BAR=bar
      \#@REBUILD_TAIL@
'''
  amt = self._make_test_amt(recipe, 'files;1.0.0;0;0;linux;release;x86_64;ubuntu;18')
  pm = self._make_caca_test_pm(amt.am)
  self._install_package(pm, 'files-1.0.0', 'linux-ubuntu-18/x86_64/release')
  self.assertEqual(['files-1.0.0'], pm.list_all(include_version=True))
  expected = [
    'db/packages.db',
    'env/bar.sh',
    'env/foo.sh',
    'env/framework/env/bes_framework.sh',
  ]
  self.assertEqual(expected, file_find.find(pm.root_dir, relative=True))
def test_sync(self):
  # sync.sync_files() should recreate the requested file list under the
  # destination.  The final assertion states every source file gained
  # exactly one hard link (before == after - 1), presumably because the
  # sync hard-links rather than copies -- TODO confirm against sync docs.
  tmp_dst_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  files = [
    'files',
    'files/1',
    'files/1/2',
    'files/1/2/3',
    'files/1/2/3/4',
    'files/1/2/3/4/5',
    'files/1/2/3/4/5/apple.txt',
    'files/1/2/3/4/5/kiwi.txt',
    'files/bar.txt',
    'files/empty',
    'files/foo.txt',
    'files/kiwi_link.txt',
  ]
  src_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  tar_util.copy_tree_with_tar(self.data_dir(), src_dir)
  num_links_before = self.num_links(src_dir)
  sync.sync_files(src_dir, tmp_dst_dir, files, 'foo')
  actual_files = file_find.find(tmp_dst_dir, file_type = file_find.ANY)
  # NOTE(review): arguments are (actual, expected) here, reversed from the
  # (expected, actual) convention used elsewhere in this file.
  self.assertEqual( actual_files, files )
  num_links_after = self.num_links(src_dir)
  self.assertEqual( num_links_before, [ n - 1 for n in num_links_after ] )
def test_extract_all(self):
  'Extracting the example dmg yields its files and links; dmg.info() is exercised before and after.'
  out_dir = temp_file.make_temp_dir()
  pre_info = dmg.info()
  dmg.extract(self.data_path('example.dmg'), out_dir)
  post_info = dmg.info()
  found = file_find.find(out_dir, relative = True, file_type = file_find.FILE_OR_LINK)
  self.assertEqual( [ 'foo.txt', 'link_to_foo.sh', 'subdir/bar.txt' ], found )
def _test_extract_with_include_exclude(self, items, include, exclude):
  'Extract `items` with the given include/exclude filters and return the relative files produced.'
  archive = self.make_temp_archive_for_reading(items)
  dest = temp_file.make_temp_dir()
  archive.extract(dest, include = include, exclude = exclude)
  found = file_find.find(dest, relative = True)
  file_util.remove(dest)
  return found
def test_extract_all_overlap(self):
  'Two archives sharing a base dir extracted over each other merge their contents.'
  assert self.default_archive_type
  first_items = temp_archive.make_temp_item_list([
    ( 'base-1.2.3/foo.txt', 'foo.txt\n' ),
    ( 'base-1.2.3/bar.txt', 'bar.txt\n' ),
  ])
  second_items = temp_archive.make_temp_item_list([
    ( 'base-1.2.3/orange.txt', 'orange.txt\n' ),
    ( 'base-1.2.3/kiwi.txt', 'kiwi.txt\n' ),
  ])
  first_archive = self.make_temp_archive_for_reading(first_items)
  second_archive = self.make_temp_archive_for_reading(second_items)
  dest = temp_file.make_temp_dir()
  first_archive.extract_all(dest)
  second_archive.extract_all(dest)
  self.assertEqual( [
    'base-1.2.3/bar.txt',
    'base-1.2.3/foo.txt',
    'base-1.2.3/kiwi.txt',
    'base-1.2.3/orange.txt',
  ], file_find.find(dest, relative = True) )
def test_create_from_packages(self):
  # Drive bes_jail.py to create a jail from a package id and verify the
  # jail contains exactly what npm reports as the package contents.
  #
  # NOTE(review): the config's line layout was reconstructed from a
  # whitespace-mangled source; verify against other jail config fixtures.
  tmp_dir = temp_file.make_temp_dir(delete=not self.DEBUG)
  jail_config_content = '''
[jail]
description: test
packages: %s

[%s]
''' % (self.__PACKAGE_ID, self.__PACKAGE_ID)
  tmp_jail_config = temp_file.make_temp_file(content=jail_config_content)
  cmd = [
    self.__BES_JAIL_PY,
    'create',
    tmp_dir,
    tmp_jail_config,
  ]
  rv = os_env.call_python_script(cmd)
  print(rv.stdout)
  self.assertEqual(0, rv.exit_code)
  expected_files = npm.package_contents(self.__PACKAGE_ID)
  actual_files = file_find.find(tmp_dir, file_type=file_find.ANY, relative=True)
  # npm reports absolute paths; re-root the relative jail listing at '/'.
  actual_files = [path.join('/', f) for f in actual_files]
  self.assertEqual(expected_files, actual_files)
def test_extract_all_overlap_with_base_dir_and_strip_common_ancestor(self):
  'With strip_common_ancestor plus a base_dir, two archives with different roots merge under the base_dir.'
  assert self.default_archive_type
  first_items = temp_archive.make_temp_item_list([
    ( 'base-1.2.3/foo.txt', 'foo.txt\n' ),
    ( 'base-1.2.3/subdir/bar.txt', 'bar.txt\n' ),
  ])
  second_items = temp_archive.make_temp_item_list([
    ( 'notbase-1.2.3/orange.txt', 'orange.txt\n' ),
    ( 'notbase-1.2.3/subdir/kiwi.txt', 'kiwi.txt\n' ),
  ])
  first_archive = self.make_temp_archive_for_reading(first_items)
  second_archive = self.make_temp_archive_for_reading(second_items)
  dest = temp_file.make_temp_dir()
  base_dir = 'foo-6.6.6'
  first_archive.extract_all(dest, base_dir = base_dir, strip_common_ancestor = True)
  second_archive.extract_all(dest, base_dir = base_dir, strip_common_ancestor = True)
  self.assertEqual( [
    'foo-6.6.6/foo.txt',
    'foo-6.6.6/orange.txt',
    'foo-6.6.6/subdir/bar.txt',
    'foo-6.6.6/subdir/kiwi.txt',
  ], file_find.find(dest, relative = True) )
def _find(clazz, root_dir, base_dir, extra_items, include, exclude):
  'Collect archive Items for the files/links under root_dir, honoring include/exclude filename patterns.'
  found = file_find.find(root_dir, relative = True, file_type = file_find.FILE | file_find.LINK)
  # Empty include means "everything"; empty exclude means "nothing".
  include_matcher = matcher_multiple_filename(include) if include else matcher_always_true()
  exclude_matcher = matcher_multiple_filename(exclude) if exclude else matcher_always_false()
  result = []
  for rel in found:
    if include_matcher.match(rel) and not exclude_matcher.match(rel):
      arcname = path.join(base_dir, rel) if base_dir else rel
      result.append(clazz.Item(path.join(root_dir, rel), arcname))
  return result + (extra_items or [])
def find_all_files(self):
  'Return every file and link under self.root (relative paths), skipping anything inside .git.'
  found = file_find.find(self.root, relative = True, file_type = file_find.FILE|file_find.LINK)
  return [ name for name in found if not name.startswith('.git') ]
def find_sources(clazz, directory):
  'Return the relative filenames under directory that archiver considers valid archives.'
  candidates = file_find.find(directory, relative=True, file_type=file_find.FILE | file_find.LINK)
  return [ name for name in candidates if archiver.is_valid(path.join(directory, name)) ]
def _test_create_with_include_exclude(self, items, include, exclude):
  'Create an archive from `items` with include/exclude filters, then return the relative files it extracts to.'
  src_dir = temp_archive.write_temp_items(items)
  archive = self.make_temp_archive_for_writing()
  archive.create(src_dir, include = include, exclude = exclude)
  self.assertTrue( path.isfile(archive.filename) )
  extract_dir = temp_file.make_temp_dir()
  archive.extract_all(extract_dir)
  found = file_find.find(extract_dir, relative = True)
  file_util.remove([ src_dir, extract_dir ])
  return found
def find(clazz, dirs, name, version):
  'Search the given dir (or list of dirs), up to 4 levels deep, and delegate matching of name/version to find_in_list().'
  filenames = []
  for d in object_util.listify(dirs):
    if not path.isdir(d):
      continue
    filenames.extend(file_find.find(d, max_depth=4, relative=False, file_type=file_find.FILE | file_find.LINK))
  return clazz.find_in_list(filenames, name, version)
def _test_extract_with_members(self, items, members, base_dir = None, strip_common_ancestor = False, strip_head = None):
  'Extract only `members` from an archive built from `items` and return the relative files produced.'
  archive = self.make_temp_archive_for_reading(items)
  dest = temp_file.make_temp_dir()
  archive.extract(dest,
                  base_dir = base_dir,
                  strip_common_ancestor = strip_common_ancestor,
                  strip_head = strip_head,
                  include = members)
  found = file_find.find(dest, relative = True)
  file_util.remove(dest)
  return found
def patch(clazz, patches, cwd, strip=1, backup=True, posix=True, program=None):
  '''Apply the given patches by calling patch with strip, backup and posix.

  `patches` may be a single patch or a list.  Stops at the first failing
  patch: its (exit_code, stderr_text) tuple is returned after the stderr
  is echoed.  Returns (0, None) when every patch applies cleanly.

  Fixes: the original computed `target_files` with a recursive
  file_find.find() walk of cwd but never used the result (dead work),
  and its loop variable `patch` shadowed this method's own name.
  '''
  patches = object_util.listify(patches)
  for next_patch in patches:
    rv = clazz._call_patch(next_patch, cwd, strip, backup, posix, program)
    if rv[0] != 0:
      sys.stderr.write(rv[1])
      sys.stderr.flush()
      return rv
  return (0, None)
def test_ensure_tool(self):
  # Ensuring a single tool descriptor installs the tool's complete on-disk
  # layout (db, env, run/setup scripts and stuff/) under the tool manager
  # root, keyed by the versioned tool dir name.
  tm, am, amt = self._make_test_tm()
  knife_desc = PD.parse('knife-6.6.6')
  tm.ensure_tools(knife_desc)
  self.assertEqual([
    'knife_6_6_6/linux-ubuntu-18/x86_64/db/packages.db',
    'knife_6_6_6/linux-ubuntu-18/x86_64/env/framework/env/bes_framework.sh',
    'knife_6_6_6/linux-ubuntu-18/x86_64/env/knife_env.sh',
    'knife_6_6_6/linux-ubuntu-18/x86_64/run.sh',
    'knife_6_6_6/linux-ubuntu-18/x86_64/setup.sh',
    'knife_6_6_6/linux-ubuntu-18/x86_64/stuff/bin/cut.exe',
    'knife_6_6_6/linux-ubuntu-18/x86_64/stuff/bin/cut.sh',
    'knife_6_6_6/linux-ubuntu-18/x86_64/stuff/bin/links_with_shared.exe',
    'knife_6_6_6/linux-ubuntu-18/x86_64/stuff/bin/links_with_static.exe',
    'knife_6_6_6/linux-ubuntu-18/x86_64/stuff/include/libfoo_shared.h',
    'knife_6_6_6/linux-ubuntu-18/x86_64/stuff/include/libfoo_static.h',
    'knife_6_6_6/linux-ubuntu-18/x86_64/stuff/lib/libfoo_shared.so',
    'knife_6_6_6/linux-ubuntu-18/x86_64/stuff/lib/libfoo_static.a',
    'knife_6_6_6/run.sh',
    'knife_6_6_6/setup.sh',
  ], file_find.find(tm.root_dir))
def test_tool_installed_files(self):
  # Ensuring 'cuchillo' also installs its transitive tool dependencies
  # ('steel' and 'wood' here), each with its own versioned layout under
  # the tool manager root.
  tm, am, amt = self._make_test_tm()
  amt.add_recipes(self.RECIPES)
  amt.publish(self.DESCRIPTORS)
  cuchillo = PD.parse('cuchillo-1.0.0')
  tm.ensure_tools(cuchillo)
  self.assertEqual([
    'cuchillo_1_0_0/linux-ubuntu-18/x86_64/db/packages.db',
    'cuchillo_1_0_0/linux-ubuntu-18/x86_64/env/cuchillo_env.sh',
    'cuchillo_1_0_0/linux-ubuntu-18/x86_64/env/framework/env/bes_framework.sh',
    'cuchillo_1_0_0/linux-ubuntu-18/x86_64/run.sh',
    'cuchillo_1_0_0/linux-ubuntu-18/x86_64/setup.sh',
    'cuchillo_1_0_0/linux-ubuntu-18/x86_64/stuff/bin/cuchillo.py',
    'cuchillo_1_0_0/run.sh',
    'cuchillo_1_0_0/setup.sh',
    'steel_1_0_0/linux-ubuntu-18/x86_64/db/packages.db',
    'steel_1_0_0/linux-ubuntu-18/x86_64/env/carbon_env.sh',
    'steel_1_0_0/linux-ubuntu-18/x86_64/env/framework/env/bes_framework.sh',
    'steel_1_0_0/linux-ubuntu-18/x86_64/env/iron_env.sh',
    'steel_1_0_0/linux-ubuntu-18/x86_64/env/steel_env.sh',
    'steel_1_0_0/linux-ubuntu-18/x86_64/run.sh',
    'steel_1_0_0/linux-ubuntu-18/x86_64/setup.sh',
    'steel_1_0_0/linux-ubuntu-18/x86_64/stuff/bin/carbon.py',
    'steel_1_0_0/linux-ubuntu-18/x86_64/stuff/bin/iron.py',
    'steel_1_0_0/linux-ubuntu-18/x86_64/stuff/bin/steel_exe.py',
    'steel_1_0_0/linux-ubuntu-18/x86_64/stuff/lib/python/easy-install.pth',
    'steel_1_0_0/linux-ubuntu-18/x86_64/stuff/lib/python/site.py',
    'steel_1_0_0/linux-ubuntu-18/x86_64/stuff/lib/python/steel.py',
    'steel_1_0_0/run.sh',
    'steel_1_0_0/setup.sh',
    'wood_1_0_0/linux-ubuntu-18/x86_64/db/packages.db',
    'wood_1_0_0/linux-ubuntu-18/x86_64/env/framework/env/bes_framework.sh',
    'wood_1_0_0/linux-ubuntu-18/x86_64/env/wood_env.sh',
    'wood_1_0_0/linux-ubuntu-18/x86_64/run.sh',
    'wood_1_0_0/linux-ubuntu-18/x86_64/setup.sh',
    'wood_1_0_0/linux-ubuntu-18/x86_64/stuff/bin/wood.py',
    'wood_1_0_0/run.sh',
    'wood_1_0_0/setup.sh',
  ], file_find.find(tm.root_dir))
def test_copy_tree_with_tar(self):
  'copy_tree_with_tar() reproduces the source tree (dirs, files and links) in the destination.'
  self.maxDiff = None
  src_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  dst_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  with tarfile.open(self.data_path('test.tar'), mode = 'r') as f:
    f.extractall(path = src_dir)
  tar_util.copy_tree_with_tar(src_dir, dst_dir)
  wanted = [
    '1',
    '1/2',
    '1/2/3',
    '1/2/3/4',
    '1/2/3/4/5',
    '1/2/3/4/5/apple.txt',
    '1/2/3/4/5/kiwi.txt',
    'bar.txt',
    'empty',
    'foo.txt',
    'kiwi_link.txt',
  ]
  self.assertEqual( wanted, file_find.find(dst_dir, file_type = file_find.ANY) )
def test_extract_all_with_strip_common_ancestor_and_strip_head(self):
  'Stripping the common ancestor plus the head "foo" flattens foo/* to the top level; other subdirs keep their path.'
  assert self.default_archive_type
  source_items = temp_archive.make_temp_item_list([
    ( 'base-1.2.3/foo/apple.txt', 'apple.txt\n' ),
    ( 'base-1.2.3/foo/durian.txt', 'durian.txt\n' ),
    ( 'base-1.2.3/foo/kiwi.txt', 'kiwi.txt\n' ),
    ( 'base-1.2.3/metadata/db.json', '{}\n' ),
  ])
  archive = self.make_temp_archive_for_reading(source_items)
  dest = temp_file.make_temp_dir()
  archive.extract_all(dest, strip_common_ancestor = True, strip_head = 'foo')
  self.assertEqual( [
    'apple.txt',
    'durian.txt',
    'kiwi.txt',
    'metadata/db.json',
  ], file_find.find(dest, relative = True) )
def test_installed_files_only_files(self):
  # A package shipping only regular files should install them under
  # stuff/, alongside the package db and the framework env.
  #
  # NOTE(review): the recipe's line layout was reconstructed from a
  # whitespace-mangled source; verify indentation against other
  # fake_package recipes in the tree.
  recipe = '''
fake_package files 1.0.0 0 0 linux release x86_64 ubuntu 18
  files
    bin/apple.sh
      \#!/bin/bash
      echo apple ; exit 0
    bin/orange.sh
      \#!/bin/bash
      echo orange ; exit 0
'''
  amt = self._make_test_amt(recipe, 'files;1.0.0;0;0;linux;release;x86_64;ubuntu;18')
  pm = self._make_caca_test_pm(amt.am)
  self._install_package(pm, 'files-1.0.0', 'linux-ubuntu-18/x86_64/release')
  self.assertEqual(['files-1.0.0'], pm.list_all(include_version=True))
  expected = [
    'db/packages.db',
    'env/framework/env/bes_framework.sh',
    'stuff/bin/apple.sh',
    'stuff/bin/orange.sh',
  ]
  self.assertEqual(expected, file_find.find(pm.root_dir, relative=True))
def test_create_from_binaries(self):
  # Drive bes_jail.py to create a jail from a list of binaries (with a
  # glob) and verify the jail contains the binaries plus the shared
  # libraries they depend on (macOS dyld closure).
  #
  # NOTE(review): the config's line layout was reconstructed from a
  # whitespace-mangled source; verify against other jail config fixtures.
  self.maxDiff = None
  tmp_dir = temp_file.make_temp_dir(delete=not self.DEBUG)
  jail_config_content = '''
[jail]
description: test
binaries: /bin/bash /bin/echo /bin/ls /bin/sh /usr/lib/dyl*
'''
  tmp_jail_config = temp_file.make_temp_file(content=jail_config_content)
  cmd = [
    self.__BES_JAIL_PY,
    'create',
    tmp_dir,
    tmp_jail_config,
  ]
  rv = os_env.call_python_script(cmd)
  print(rv.stdout)
  self.assertEqual(0, rv.exit_code)
  expected_files = sorted([
    '/bin',
    '/bin/bash',
    '/bin/echo',
    '/bin/ls',
    '/bin/sh',
    '/usr',
    '/usr/lib',
    '/usr/lib/dyld',
    '/usr/lib/dylib1.10.5.o',
    '/usr/lib/dylib1.o',
    '/usr/lib/libDiagnosticMessagesClient.dylib',
    '/usr/lib/libSystem.B.dylib',
    '/usr/lib/libauto.dylib',
    '/usr/lib/libc++.1.dylib',
    '/usr/lib/libc++abi.dylib',
    '/usr/lib/libncurses.5.4.dylib',
    '/usr/lib/libobjc.A.dylib',
    '/usr/lib/libutil.dylib',
    '/usr/lib/system',
    '/usr/lib/system/libcache.dylib',
    '/usr/lib/system/libcommonCrypto.dylib',
    '/usr/lib/system/libcompiler_rt.dylib',
    '/usr/lib/system/libcopyfile.dylib',
    '/usr/lib/system/libcorecrypto.dylib',
    '/usr/lib/system/libdispatch.dylib',
    '/usr/lib/system/libdyld.dylib',
    '/usr/lib/system/libkeymgr.dylib',
    '/usr/lib/system/liblaunch.dylib',
    '/usr/lib/system/libmacho.dylib',
    '/usr/lib/system/libquarantine.dylib',
    '/usr/lib/system/libremovefile.dylib',
    '/usr/lib/system/libsystem_asl.dylib',
    '/usr/lib/system/libsystem_blocks.dylib',
    '/usr/lib/system/libsystem_c.dylib',
    '/usr/lib/system/libsystem_configuration.dylib',
    '/usr/lib/system/libsystem_coreservices.dylib',
    '/usr/lib/system/libsystem_coretls.dylib',
    '/usr/lib/system/libsystem_dnssd.dylib',
    '/usr/lib/system/libsystem_info.dylib',
    '/usr/lib/system/libsystem_kernel.dylib',
    '/usr/lib/system/libsystem_m.dylib',
    '/usr/lib/system/libsystem_malloc.dylib',
    '/usr/lib/system/libsystem_network.dylib',
    '/usr/lib/system/libsystem_networkextension.dylib',
    '/usr/lib/system/libsystem_notify.dylib',
    '/usr/lib/system/libsystem_platform.dylib',
    '/usr/lib/system/libsystem_pthread.dylib',
    '/usr/lib/system/libsystem_sandbox.dylib',
    '/usr/lib/system/libsystem_secinit.dylib',
    '/usr/lib/system/libsystem_stats.dylib',
    '/usr/lib/system/libsystem_trace.dylib',
    '/usr/lib/system/libunc.dylib',
    '/usr/lib/system/libunwind.dylib',
    '/usr/lib/system/libxpc.dylib',
  ])
  actual_files = file_find.find(tmp_dir, file_type=file_find.ANY, relative=True)
  # The jail listing is relative; re-root it at '/' for comparison.
  actual_files = [path.join('/', f) for f in actual_files]
  self.assertEqual(expected_files, actual_files)
def test_remove_artifact(self):
  # remove_artifact() should delete the artifact's tarball from disk and
  # drop it from the latest-versions listing, leaving all other artifacts
  # (including other revisions of the same packages) untouched.
  self.maxDiff = None
  mutations = { 'system': 'macos', 'distro': '', 'distro_version': '10.14' }
  am = FPUT.make_artifact_manager(self.DEBUG, RECIPES.FOODS, self.MACOS_BT, mutations)
  expected = [
    AD.parse('apple;1.2.3;1;0;macos;release;x86_64;;10.14'),
    AD.parse('arsenic;1.2.9;1;0;macos;release;x86_64;;10.14'),
    AD.parse('citrus;1.0.0;2;0;macos;release;x86_64;;10.14'),
    AD.parse('fiber;1.0.0;0;0;macos;release;x86_64;;10.14'),
    AD.parse('fructose;3.4.5;6;0;macos;release;x86_64;;10.14'),
    AD.parse('fruit;1.0.0;0;0;macos;release;x86_64;;10.14'),
    AD.parse('knife;1.0.0;0;0;macos;release;x86_64;;10.14'),
    AD.parse('mercury;1.2.9;0;0;macos;release;x86_64;;10.14'),
    AD.parse('orange;6.5.4;3;0;macos;release;x86_64;;10.14'),
    AD.parse('orange_juice;1.4.5;0;0;macos;release;x86_64;;10.14'),
    AD.parse('pear;1.2.3;1;0;macos;release;x86_64;;10.14'),
    AD.parse('pear_juice;6.6.6;0;0;macos;release;x86_64;;10.14'),
    AD.parse('smoothie;1.0.0;0;0;macos;release;x86_64;;10.14'),
    AD.parse('water;1.0.0;2;0;macos;release;x86_64;;10.14'),
  ]
  self.assertEqual(expected, am.list_latest_versions(self.MACOS_BT))
  # Baseline: all published tarballs are on disk.
  self.assertEqual([
    'artifacts.db',
    'macos-10.14/x86_64/release/apple-1.2.3-1.tar.gz',
    'macos-10.14/x86_64/release/arsenic-1.2.10.tar.gz',
    'macos-10.14/x86_64/release/arsenic-1.2.9-1.tar.gz',
    'macos-10.14/x86_64/release/arsenic-1.2.9.tar.gz',
    'macos-10.14/x86_64/release/citrus-1.0.0-2.tar.gz',
    'macos-10.14/x86_64/release/fiber-1.0.0.tar.gz',
    'macos-10.14/x86_64/release/fructose-3.4.5-6.tar.gz',
    'macos-10.14/x86_64/release/fruit-1.0.0.tar.gz',
    'macos-10.14/x86_64/release/knife-1.0.0.tar.gz',
    'macos-10.14/x86_64/release/mercury-1.2.8-1.tar.gz',
    'macos-10.14/x86_64/release/mercury-1.2.8.tar.gz',
    'macos-10.14/x86_64/release/mercury-1.2.9.tar.gz',
    'macos-10.14/x86_64/release/orange-6.5.4-3.tar.gz',
    'macos-10.14/x86_64/release/orange_juice-1.4.5.tar.gz',
    'macos-10.14/x86_64/release/pear-1.2.3-1.tar.gz',
    'macos-10.14/x86_64/release/pear_juice-6.6.6.tar.gz',
    'macos-10.14/x86_64/release/smoothie-1.0.0.tar.gz',
    'macos-10.14/x86_64/release/water-1.0.0-1.tar.gz',
    'macos-10.14/x86_64/release/water-1.0.0-2.tar.gz',
    'macos-10.14/x86_64/release/water-1.0.0.tar.gz',
  ], file_find.find(am.root_dir))
  # Remove two artifacts...
  am.remove_artifact(AD.parse('apple;1.2.3;1;0;macos;release;x86_64;;10.14'))
  am.remove_artifact(AD.parse('smoothie;1.0.0;0;0;macos;release;x86_64;;10.14'))
  # ...and both the listing and the on-disk tree shrink accordingly.
  expected = [
    AD.parse('arsenic;1.2.9;1;0;macos;release;x86_64;;10.14'),
    AD.parse('citrus;1.0.0;2;0;macos;release;x86_64;;10.14'),
    AD.parse('fiber;1.0.0;0;0;macos;release;x86_64;;10.14'),
    AD.parse('fructose;3.4.5;6;0;macos;release;x86_64;;10.14'),
    AD.parse('fruit;1.0.0;0;0;macos;release;x86_64;;10.14'),
    AD.parse('knife;1.0.0;0;0;macos;release;x86_64;;10.14'),
    AD.parse('mercury;1.2.9;0;0;macos;release;x86_64;;10.14'),
    AD.parse('orange;6.5.4;3;0;macos;release;x86_64;;10.14'),
    AD.parse('orange_juice;1.4.5;0;0;macos;release;x86_64;;10.14'),
    AD.parse('pear;1.2.3;1;0;macos;release;x86_64;;10.14'),
    AD.parse('pear_juice;6.6.6;0;0;macos;release;x86_64;;10.14'),
    AD.parse('water;1.0.0;2;0;macos;release;x86_64;;10.14'),
  ]
  self.assertEqual(expected, am.list_latest_versions(self.MACOS_BT))
  self.assertEqual([
    'artifacts.db',
    'macos-10.14/x86_64/release/arsenic-1.2.10.tar.gz',
    'macos-10.14/x86_64/release/arsenic-1.2.9-1.tar.gz',
    'macos-10.14/x86_64/release/arsenic-1.2.9.tar.gz',
    'macos-10.14/x86_64/release/citrus-1.0.0-2.tar.gz',
    'macos-10.14/x86_64/release/fiber-1.0.0.tar.gz',
    'macos-10.14/x86_64/release/fructose-3.4.5-6.tar.gz',
    'macos-10.14/x86_64/release/fruit-1.0.0.tar.gz',
    'macos-10.14/x86_64/release/knife-1.0.0.tar.gz',
    'macos-10.14/x86_64/release/mercury-1.2.8-1.tar.gz',
    'macos-10.14/x86_64/release/mercury-1.2.8.tar.gz',
    'macos-10.14/x86_64/release/mercury-1.2.9.tar.gz',
    'macos-10.14/x86_64/release/orange-6.5.4-3.tar.gz',
    'macos-10.14/x86_64/release/orange_juice-1.4.5.tar.gz',
    'macos-10.14/x86_64/release/pear-1.2.3-1.tar.gz',
    'macos-10.14/x86_64/release/pear_juice-6.6.6.tar.gz',
    'macos-10.14/x86_64/release/water-1.0.0-1.tar.gz',
    'macos-10.14/x86_64/release/water-1.0.0-2.tar.gz',
    'macos-10.14/x86_64/release/water-1.0.0.tar.gz',
  ], file_find.find(am.root_dir))
def _find_in_dir(clazz, where):
  'Return all files found under where; an empty list when where is not a directory.'
  if path.isdir(where):
    return file_find.find(where)
  return []
def contents(clazz, dmg):
  '''Return the relative files and links contained in the given dmg image.

  The image is mounted at a temporary dir, scanned, then ejected.  The
  eject now happens in a finally block so the mount point is not leaked
  when the scan raises (the original skipped the eject on exceptions).
  '''
  file_check.check_file(dmg)
  mnt = clazz._mount_at_temp_dir(dmg)
  try:
    return file_find.find(mnt.mount_point, relative = True, file_type = file_find.FILE_OR_LINK)
  finally:
    clazz._eject(mnt.mount_point)
def _handle_post_extract(clazz, dest_dir, include, exclude):
  '''Print the files under dest_dir that the include/exclude filters do not want.

  Bug fix: the original referenced `self._find(...)` inside a
  clazz-style classmethod where `self` is undefined, raising NameError
  on every call; it must be `clazz._find(...)`.
  '''
  all_files = file_find.find(dest_dir, relative = True, file_type = file_find.FILE | file_find.LINK)
  wanted_files = clazz._find(dest_dir, None, None, include, exclude)
  # NOTE(review): confirm _find() returns relative names here -- if it
  # returns Item objects the set difference would never intersect.
  delta = set(all_files) - set(wanted_files)
  for f in delta:
    print('clobber: %s' % (f))
def find_strippable_binaries(clazz, d, format_name=None):
  'Recursively collect the files under d that clazz.is_strippable() accepts for the given format.'
  candidates = file_find.find(d, relative=False)
  return [filename for filename in candidates if clazz.is_strippable(filename, format_name)]
def main():
  '''Entry point for the bes test runner.

  Parses arguments, resolves the test files to run, optionally performs
  auxiliary modes (--compile-only, --print-*, --pre-commit, --egg), then
  executes each test file with each configured python, printing a summary,
  per-file timings and any side effects (cwd change, file droppings).
  Returns a process exit code.

  Bug fix: the snakefood guard read
    `args.print_deps or args.pre_commit and not ar.supports_...()`
  which, by operator precedence, made --print-deps ALWAYS error out even
  when dependency support is available.  Parenthesized to
    `(args.print_deps or args.pre_commit) and not ar.supports_...()`.
  '''
  import bes
  vcli = version_cli(bes)
  parser = argparse.ArgumentParser()
  parser.add_argument('files', action = 'store', nargs = '*', help = 'Files or directories to rename')
  vcli.version_add_arguments(parser)
  parser.add_argument('--dry-run', '-n', action = 'store_true', default = False,
                      help = 'Only print what files will get tests [ False ]')
  parser.add_argument('--timing', '-t', action = 'store_true', default = False,
                      help = 'Show the amount of time it takes to run tests [ False ]')
  parser.add_argument('--verbose', '-v', action = 'store_true', default = False,
                      help = 'Verbose debug output [ False ]')
  parser.add_argument('--stop', '-s', action = 'store_true', default = False,
                      help = 'Stop right after the first failure. [ False ]')
  parser.add_argument('--randomize', action = 'store_true', default = False,
                      help = 'Randomize the order in which unit tests run. [ False ]')
  parser.add_argument('--python', action = 'append', default = [],
                      help = 'Python executable) to use. Multiple flags can be used for running with mutiple times with different python versions [ python ]')
  parser.add_argument('--page', '-p', action = 'store_true', default = False,
                      help = 'Page output with $PAGER [ False ]')
  parser.add_argument('--profile', action = 'store', default = None,
                      help = 'Profile the code with cProfile and store the output in the given argument [ None ]')
  parser.add_argument('--coverage', action = 'store', default = None,
                      help = 'Run coverage on the code and store the output in the given argument [ None ]')
  parser.add_argument('--pager', action = 'store', default = os.environ.get('PAGER', 'more'),
                      help = 'Pager to use when paging [ %s ]' % (os.environ.get('PAGER', 'more')))
  parser.add_argument('--iterations', '-i', action = 'store', default = 1, type = int,
                      help = 'Python executable to use [ python ]')
  parser.add_argument('--git', '-g', action = 'store_true', default = False,
                      help = 'Use git status to figure out what has changed to test [ False ]')
  parser.add_argument('--pre-commit', action = 'store_true', default = False,
                      help = 'Run pre commit checks [ False ]')
  parser.add_argument('--print-tests', action = 'store_true', default = False,
                      help = 'Print the list of unit tests [ False ]')
  parser.add_argument('--print-files', action = 'store_true', default = False,
                      help = 'Print the list of unit files [ False ]')
  parser.add_argument('--egg', action = 'store_true', default = False,
                      help = 'Make an egg of the package and run the tests against that instead the live files. [ False ]')
  parser.add_argument('--save-egg', action = 'store_true', default = False,
                      help = 'Save the egg in the current directory. [ False ]')
  parser.add_argument('--ignore', action = 'append', default = [],
                      help = 'Patterns of filenames to ignore []')
  parser.add_argument('--root-dir', action = 'store', default = None,
                      help = 'The root directory for all your projets. By default its computed from your git struture. [ None ]')
  parser.add_argument('--dont-hack-env', action = 'store_true', default = False,
                      help = 'Dont hack PATH and PYTHONPATH. [ False ]')
  parser.add_argument('--compile-only', '-c', action = 'store_true', default = False,
                      help = 'Just compile the files to verify syntax [ False ]')
  parser.add_argument('--print-deps', action = 'store_true', default = False,
                      help = 'Print python dependencies for test files [ False ]')
  parser.add_argument('--print-configs', action = 'store_true', default = False,
                      help = 'Print testing configs found [ False ]')
  parser.add_argument('--print-root-dir', action = 'store_true', default = False,
                      help = 'Print the root dir [ False ]')
  parser.add_argument('--print-path', action = 'store_true', default = False,
                      help = 'Print sys.path [ False ]')
  parser.add_argument('--file-ignore-file', action = 'append', default = [],
                      help = 'List of file ignore files. [ .bes_test_ignore .bes_test_internal_ignore ]')
  parser.add_argument('--env', action = 'append', default = [],
                      help = 'Environment variables to set [ None ]')
  parser.add_argument('--no-env-deps', action = 'store_true', default = False,
                      help = 'Dont use env deps. [ False ]')
  parser.add_argument('--temp-dir', action = 'store', default = None,
                      help = 'The directory to use for tmp files overriding the system default. [ None ]')
  # Sort options within each group so --help output is alphabetical.
  for g in parser._action_groups:
    g._group_actions.sort(key = lambda x: x.dest)
  args = parser.parse_args()
  if args.temp_dir:
    file_util.mkdir(args.temp_dir)
    tempfile.tempdir = args.temp_dir
  if os.environ.get('DEBUG', False):
    args.verbose = True
  cwd = os.getcwd()
  if args.version:
    vcli.version_print_version()
    return 0
  args.env = _parse_args_env(args.env)
  if not args.files:
    args.files = [ cwd ]
  if not args.file_ignore_file:
    args.file_ignore_file = [ '.bes_test_ignore', '.bes_test_internal_ignore' ]
  ar = argument_resolver(cwd, args.files, root_dir = args.root_dir,
                         file_ignore_filename = args.file_ignore_file,
                         check_git = args.git,
                         use_env_deps = not args.no_env_deps)
  ar.num_iterations = args.iterations
  ar.randomize = args.randomize
  ar.ignore_with_patterns(args.ignore)
  if args.compile_only:
    total_files = len(ar.all_files)
    for i, f in enumerate(ar.all_files):
      tmp = temp_file.make_temp_file()
      filename_count_blurb = ' ' + _make_count_blurb(i + 1, total_files)
      short_filename = file_util.remove_head(f, cwd)
      blurb = '%7s:%s %s ' % ('compile', filename_count_blurb, short_filename)
      printer.writeln_name(blurb)
      py_compile.compile(f, cfile = tmp, doraise = True)
    return 0
  if not ar.test_descriptions:
    return 1
  if args.print_path:
    for p in sys.path:
      print(p)
    return 0
  if args.print_configs:
    ar.print_configs()
    return 0
  if args.print_root_dir:
    print(ar.root_dir)
    return 0
  if args.print_files:
    ar.print_files()
    return 0
  if args.print_tests:
    ar.print_tests()
    return 0
  # BUG FIX: parenthesized -- see docstring.
  if (args.print_deps or args.pre_commit) and not ar.supports_test_dependency_files():
    printer.writeln_name('ERROR: Cannot figure out dependencies. snakefood missing.')
    return 1
  if args.print_deps:
    dep_files = ar.test_dependency_files()
    for filename in sorted(dep_files.keys()):
      print(filename)
      for dep_file in dep_files[filename]:
        print(' %s' % (dep_file.filename))
    return 0
  # Start with a clean environment so unit testing can be deterministic and not subject
  # to whatever the user happened to have exported.  PYTHONPATH and PATH for dependencies
  # are set below by iterating the configs
  keep_keys = [ 'BES_LOG', 'BES_VERBOSE', 'BESCFG_PATH', 'DEBUG', 'BES_TEMP_DIR' ]
  if args.dont_hack_env:
    keep_keys.extend([ 'PATH', 'PYTHONPATH'])
  env = os_env.make_clean_env(keep_keys = keep_keys)
  env['PYTHONDONTWRITEBYTECODE'] = 'x'
  variables = {
    'rebuild_dir': path.expanduser('~/.rebuild'),
    'system': host.SYSTEM,
  }
  if not args.dont_hack_env:
    ar.update_environment(env, variables)
  # Update env with whatever was given in --env
  env.update(args.env)
  num_passed = 0
  num_failed = 0
  num_executed = 0
  num_tests = len(ar.test_descriptions)
  failed_tests = []
  # Remove current dir from sys.path to avoid side effects
  if cwd in sys.path:
    sys.path.remove(cwd)
  if args.egg:
    pythonpath = env_var(env, 'PYTHONPATH')
    pythonpath.remove(cwd)
    for config in ar.env_dependencies_configs:
      setup_dot_py = path.join(config.root_dir, 'setup.py')
      if not path.isfile(setup_dot_py):
        raise RuntimeError('No setup.py found in %s to make the egg.' % (cwd))
      egg_zip = egg.make(setup_dot_py)
      pythonpath.prepend(egg_zip)
      printer.writeln_name('using tmp egg: %s' % (egg_zip))
      if args.save_egg:
        file_util.copy(egg_zip, path.join(cwd, path.basename(egg_zip)))
  if args.pre_commit:
    missing_from_git = []
    for filename, dep_files in ar.test_dependency_files().items():
      for dep_file in dep_files:
        if dep_file.config and not dep_file.git_tracked:
          missing_from_git.append(dep_file.filename)
    if missing_from_git:
      for f in missing_from_git:
        printer.writeln_name('PRE_COMMIT: missing from git: %s' % (path.relpath(f)))
      return 1
    return 0
  ar.cleanup_python_compiled_files()
  # Do all our work with a temporary working directory to be able to check for side effects
  tmp_cwd = temp_file.make_temp_dir(prefix = 'bes_test_', suffix = '.tmp.dir', delete = False)
  os.chdir(tmp_cwd)
  # Use what the OS thinks the path is (to deal with symlinks and virtual tmpfs things)
  tmp_cwd = os.getcwd()
  if not args.dry_run and args.page:
    printer.OUTPUT = tempfile.NamedTemporaryFile(prefix = 'bes_test', delete = True, mode = 'w')
  total_tests = _count_tests(ar.inspect_map, ar.test_descriptions)
  total_files = len(ar.test_descriptions)
  total_num_tests = 0
  if not args.python:
    args.python = [ 'python' ]
  if args.profile:
    args.profile = path.abspath(args.profile)
    if not _check_program('cprofilev'):
      return 1
  if args.coverage:
    args.coverage = path.abspath(args.coverage)
    coverage_exe = _check_program('coverage')
    if not coverage_exe:
      return 1
    args.python = [ coverage_exe ]
  if args.profile and args.coverage:
    printer.writeln_name('ERROR: --profile and --coverage are mutually exclusive.')
    return 1
  options = test_options(args.dry_run, args.verbose, args.stop, args.timing,
                         args.profile, args.coverage, args.python, args.temp_dir)
  timings = {}
  total_time_start = time.time()
  stopped = False
  for i, test_desc in enumerate(ar.test_descriptions):
    file_info = test_desc.file_info
    filename = file_info.filename
    if not filename in timings:
      timings[filename] = []
    for python_exe in args.python:
      result = _test_execute(python_exe, ar.inspect_map, filename, test_desc.tests, options, i + 1, total_files, cwd, env)
      timings[filename].append(result.elapsed_time)
      total_num_tests += result.num_tests_run
      num_executed += 1
      if result.success:
        num_passed += 1
      else:
        num_failed += 1
        failed_tests.append(( python_exe, filename, result ))
      if args.stop and not result.success:
        stopped = True
    if stopped:
      break
  total_elapsed_time = 1000 * (time.time() - total_time_start)
  if args.dry_run:
    return 0
  num_skipped = num_tests - num_executed
  summary_parts = []
  if total_num_tests == total_tests:
    function_summary = '(%d %s)' % (total_tests, _make_test_string(total_tests))
  else:
    function_summary = '(%d of %d %s)' % (total_num_tests, total_tests, _make_test_string(total_tests))
  if num_failed > 0:
    summary_parts.append('%d of %d fixtures FAILED' % (num_failed, num_tests))
  summary_parts.append('%d of %d passed %s' % (num_passed, num_tests, function_summary))
  if num_skipped > 0:
    summary_parts.append('%d of %d skipped' % (num_skipped, num_tests))
  summary = '; '.join(summary_parts)
  printer.writeln_name('%s' % (summary))
  if failed_tests:
    longest_python_exe = max([len(path.basename(p)) for p in options.interpreters])
    for python_exe, filename, result in failed_tests:
      if len(options.interpreters) > 1:
        python_exe_blurb = path.basename(python_exe).rjust(longest_python_exe)
      else:
        python_exe_blurb = ''
      error_status = unit_test_output.error_status(result.output)
      for error in error_status.errors:
        printer.writeln_name('%5s: %s %s :%s.%s' % (error.error_type,
                                                    python_exe_blurb,
                                                    file_util.remove_head(filename, cwd),
                                                    error.fixture,
                                                    error.function))
  if num_failed > 0:
    rv = 1
  else:
    rv = 0
  if args.timing:
    filenames = sorted(timings.keys())
    num_filenames = len(filenames)
    for i, filename in zip(range(0, num_filenames), filenames):
      short_filename = file_util.remove_head(filename, cwd)
      all_timings = timings[filename]
      num_timings = len(all_timings)
      avg_ms = _timing_average(all_timings) * 1000.0
      if num_timings > 1:
        run_blurb = '(average of %d runs)' % (num_timings)
      else:
        run_blurb = ''
      if num_filenames > 1:
        count_blurb = '[%s of %s] ' % (i + 1, num_filenames)
      else:
        count_blurb = ''
      printer.writeln_name('timing: %s%s - %2.2f ms %s' % (count_blurb, short_filename, avg_ms, run_blurb))
  if total_elapsed_time >= 1000.0:
    printer.writeln_name('total time: %2.2f s' % (total_elapsed_time / 1000.0))
  else:
    printer.writeln_name('total time: %2.2f ms' % (total_elapsed_time))
  if args.page:
    subprocess.call([ args.pager, printer.OUTPUT.name ])
  # Report side effects: tests must not change cwd or leave droppings.
  current_cwd = os.getcwd()
  if current_cwd != tmp_cwd:
    printer.writeln_name('SIDE EFFECT: working directory was changed from %s to %s' % (tmp_cwd, current_cwd))
  droppings = file_find.find(current_cwd, relative = False, file_type = file_find.ANY)
  for dropping in droppings:
    printer.writeln_name('SIDE EFFECT: dropping found: %s' % (dropping))
  if not droppings:
    os.chdir('/tmp')
    file_util.remove(tmp_cwd)
  return rv
def num_links(clazz, d):
  'Return the hard-link count (st_nlink) of every regular file found under d.'
  return [ os.stat(filename).st_nlink for filename in file_find.find(d, relative = False, file_type = file_find.FILE) ]