def xtest_write_filename(self):
  'Round trip: parse a .pc file, write it back out, re-parse and compare. (disabled: xtest prefix)'
  src = temp_file.make_temp_file(content = self.FOO_PC)
  original = caca_pkg_config_file()
  original.parse_file(src)
  dst = temp_file.make_temp_file()
  original.write_file(dst)
  reparsed = caca_pkg_config_file()
  reparsed.parse_file(dst)
  self.assertEqual( original, reparsed )
def _make_temp_archive_xz(clazz, items, filename, mode):
  '''Create an xz (tar.xz) archive at filename from the given temp items.

  Each item needs .arcname and .content; files are staged in a temp dir and
  tarred with a -T manifest.  `mode` is unused here but kept because the
  archive-type dispatcher calls every maker with the same signature.
  '''
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    assert item
    assert item.arcname
    file_util.save(path.join(tmp_dir, item.arcname), content = item.content)
  # fix: dropped `tmp_xz = temp_file.make_temp_file()` — it was never used.
  manifest_content = '\n'.join([ item.arcname for item in items ])
  manifest = temp_file.make_temp_file(content = manifest_content)
  cmd = 'tar Jcf %s -C %s -T %s' % (filename, tmp_dir, manifest)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
def diff(clazz, archive1, archive2, strip_common_ancestor = False):
  'Return the output of diffing the contents of 2 archives.'
  # Write each archive's member listing to a temp file and diff them.
  listing1 = '\n'.join(archiver.members(archive1))
  listing2 = '\n'.join(archiver.members(archive2))
  left = temp_file.make_temp_file(content = listing1)
  right = temp_file.make_temp_file(content = listing2)
  result = execute.execute('diff -u %s %s' % (left, right), raise_error = False)
  return result.stdout
def _call_patch(clazz, patch, cwd, strip, backup, posix, program):
  '''Run the patch program on `patch` in directory `cwd`.

  strip: -p level or None; backup: add -b; posix: add --posix;
  program: patch executable name (defaults to 'patch').
  Returns whatever __call_subprocess returns; raises RuntimeError if the
  patch program cannot be found.
  '''
  program = program or 'patch'
  program_abs = file_path.which(program)
  if not program_abs:
    raise RuntimeError('patch program not found: %s' % (program))
  cmd = [ program_abs, '--force', '--silent' ]
  if strip is not None:  # fix: identity comparison with None (was `!= None`)
    cmd.append('-p%d' % (strip))
  if backup:
    cmd.append('-b')
  if posix:
    cmd.append('--posix')
  # If the patch is compressed, uncompress it to a temp file.
  # Not sure why gzip.open() does not work in this case; i tried.
  if clazz.patch_is_compressed(patch):
    uncompressed_patch = temp_file.make_temp_file(prefix = 'patch_', suffix = '.patch')
    compressed_file.uncompress(patch, uncompressed_patch)
    patch = uncompressed_patch
  with open(patch, 'r') as stdin:
    return clazz.__call_subprocess(cmd, cwd, stdin)
  # fix: removed the trailing `return (1, None)` — the `with` block always
  # returns (or propagates an exception), so that line was unreachable.
def extract(self, dest_dir, base_dir = None, strip_common_ancestor = False, strip_head = None, include = None, exclude = None):
  '''Extract the dmg archive to dest_dir, honoring include/exclude filters.

  Bug fix: the original had a second, unconditional `return self.extract_all(...)`
  right after the filtered check, which made the selective-extract path below
  unreachable — include/exclude were silently ignored.  The unreachable
  `self._handle_post_extract(...)` call was dropped with it; TODO confirm
  whether post-extract handling should run on the filtered path.
  '''
  dest_dir = self._determine_dest_dir(dest_dir, base_dir)
  filtered_members = self._filter_for_extract(self.members, include, exclude)
  if filtered_members == self.members:
    # Nothing filtered out: the whole-archive fast path suffices.
    return self.extract_all(dest_dir, base_dir = base_dir,
                            strip_common_ancestor = strip_common_ancestor,
                            strip_head = strip_head)
  # Cheat by using a temporary zip file to do the actual work. Super innefecient but
  # easy since theres no library to extract just some stuff from dmg files.
  tmp_dir = temp_file.make_temp_dir()
  dmg.extract(self.filename, tmp_dir)
  tmp_zip = temp_file.make_temp_file(suffix = '.zip')
  az = archive_zip(tmp_zip)
  az.create(tmp_dir)
  az.extract(dest_dir, base_dir = base_dir,
             strip_common_ancestor = strip_common_ancestor,
             strip_head = strip_head,
             include = include, exclude = exclude)
  file_util.remove(tmp_zip)
  file_util.remove(tmp_dir)
def test_cached_content(self):
  'cached_content() should return exactly the bytes of the source file.'
  root = temp_file.make_temp_dir(prefix = 'test_cached_root_', suffix = '.dir', delete = not self.DEBUG)
  source = temp_file.make_temp_file(content = 'foo\n', delete = not self.DEBUG)
  want = file_util.read(source)
  got = file_cache.cached_content(source, root_dir = root)
  self.assertEqual( want, got )
def test_create_from_packages(self):
  'Create a jail from a package list config and verify its file contents.'
  # NOTE(review): the config literal below was reconstructed from a
  # whitespace-mangled source — confirm line breaks against the original.
  jail_root = temp_file.make_temp_dir(delete = not self.DEBUG)
  jail_config_content = '''
[jail]
description: test
packages: %s

[%s]
''' % (self.__PACKAGE_ID, self.__PACKAGE_ID)
  config_path = temp_file.make_temp_file(content = jail_config_content)
  cmd = [ self.__BES_JAIL_PY, 'create', jail_root, config_path ]
  rv = os_env.call_python_script(cmd)
  print(rv.stdout)
  self.assertEqual(0, rv.exit_code)
  expected_files = npm.package_contents(self.__PACKAGE_ID)
  found = file_find.find(jail_root, file_type = file_find.ANY, relative = True)
  actual_files = [ path.join('/', f) for f in found ]
  self.assertEqual(expected_files, actual_files)
def test_config_file(self):
  # Parse the shared test config with ${root} substituted as 'caca', then
  # verify every parsed field: per-package name/description/include/exclude/
  # missing lists, the jail binaries list, and the pre/post/cleanup hooks.
  tmp_config_file = temp_file.make_temp_file(content = self.__TEST_CONFIG_CONTENT)
  variables = { 'root': 'caca' }
  config = config_file(tmp_config_file, variables)
  self.assertEqual( 'nice jail', config.jail.description )
  self.assertEqual( 3, len(config.jail.packages) )
  # package 0: 'orange'
  self.assertEqual( 'orange', config.jail.packages[0].name )
  self.assertEqual( 'orange desc', config.jail.packages[0].description )
  self.assertEqual( [ '/usr*' ], config.jail.packages[0].include )
  self.assertEqual( None, config.jail.packages[0].exclude )
  # package 1: 'apple' — the only one with exclude and missing entries
  self.assertEqual( 'apple', config.jail.packages[1].name )
  self.assertEqual( 'apple desc', config.jail.packages[1].description )
  self.assertEqual( [ '/foo*', '/bar*' ], config.jail.packages[1].include )
  self.assertEqual( [ '/baz*' ], config.jail.packages[1].exclude )
  self.assertEqual( [ '/pear' ], config.jail.packages[1].missing )
  # package 2: 'kiwi' — no include/exclude at all
  self.assertEqual( 'kiwi', config.jail.packages[2].name )
  self.assertEqual( 'none', config.jail.packages[2].description )
  self.assertEqual( None, config.jail.packages[2].include )
  self.assertEqual( None, config.jail.packages[2].exclude )
  self.assertEqual( [ '/bin/bash' ], config.jail.binaries )
  # hook commands should have ${root} expanded to 'caca'
  self.assertEqual( [ 'ln -s x caca/y', 'ln -s a caca/b' ], config.jail.hooks.pre )
  self.assertEqual( [ 'ln -s c caca/d', 'ln -s e caca/f' ], config.jail.hooks.post )
  self.assertEqual( [ 'rm -f caca/foo' ], config.jail.hooks.cleanup )
def publish_artifacts(clazz, am):
  'Publish every *.tar.gz found under am.root_dir, moving each through a temp copy.'
  for artifact in file_find.find_fnmatch(am.root_dir, [ '*.tar.gz' ], relative = False):
    staged = temp_file.make_temp_file()
    file_util.copy(artifact, staged)
    file_util.remove(artifact)
    pkg = package(staged)
    am.publish(staged, pkg.metadata.build_target, False)
def test_read_file(self):
  'read_file() should skip comment lines and return (dirname, patterns).'
  # NOTE(review): literal reconstructed from whitespace-mangled source.
  content = '''
# comment
*.txt
*.png
foobar
# comment
'''
  tmp = temp_file.make_temp_file(content = content)
  expected = ( path.dirname(tmp), [ '*.txt', '*.png', 'foobar' ] )
  self.assertEqual( expected, IFD.read_file(tmp) )
def _make_temp_archive_dmg(clazz, items, filename, mode):
  '''Create a dmg archive at filename from the given temp items via hdiutil.

  `mode` is unused but kept for the common maker signature used by the
  archive-type dispatcher.
  '''
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    assert item
    assert item.arcname
    file_util.save(path.join(tmp_dir, item.arcname), content = item.content)
  # fix: dropped `tmp_dmg = temp_file.make_temp_file()` — it was never used.
  cmd = 'hdiutil create -srcfolder %s -ov -format UDZO %s' % (tmp_dir, filename)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
def create_many_packages(clazz, recipe, metadata_mutations = None, debug = False):
  '''Parse a multi-recipe string and create one package per recipe.

  Returns the list of created package filenames.  Temp files are kept when
  debug is True.
  '''
  # fix: mutable default argument ({}) replaced with None sentinel.
  metadata_mutations = metadata_mutations or {}
  recipes = clazz._parse_many_recipes(recipe, metadata_mutations)
  result = []
  for r in recipes:
    tmp_file = temp_file.make_temp_file(delete = not debug)
    if debug:
      print('tmp_file: %s' % (tmp_file))
    result.append(r.create_package(tmp_file, debug = debug).filename)
  return result
def xtest_write_many_examples(self):
  'Round trip every *.pc example in the data dir. (disabled: xtest prefix)'
  for example in glob.glob(path.join(self.data_path(), '*.pc')):
    parsed = caca_pkg_config_file()
    parsed.parse_file(example)
    written = temp_file.make_temp_file()
    parsed.write_file(written)
    reparsed = caca_pkg_config_file()
    reparsed.parse_file(written)
    self.assertEqual( parsed, reparsed )
def get_data_program_exe(clazz, program_path, filename, module_name):
  '''Return an executable path for a data program, or None if unavailable.

  Outside an egg, returns the resolved path if it is executable.  Inside an
  egg, the program bytes are written to an executable temp file and that
  path is returned.
  '''
  try:
    inside_egg, exe_data = clazz._resolve_data(program_path, filename, module_name)
    if not inside_egg:
      if not file_path.is_executable(exe_data):
        return None
      return exe_data
    exe_tmp = temp_file.make_temp_file(content = exe_data, prefix = path.basename(program_path) + '-')
    os.chmod(exe_tmp, 0o755)
    return exe_tmp
  except Exception:
    # Best effort by design: any failure yields None.  fix: removed the
    # unused `as ex` binding.  NOTE(review): this swallows unexpected
    # errors silently — consider logging before returning None.
    return None
def _make_temp_archive_tar(clazz, items, filename, mode):
  '''Create a tar archive at filename from the given temp items.

  mode is a tarfile write mode (e.g. 'w', 'w:gz').
  '''
  with open(filename, 'wb') as fp:
    archive = tarfile.open(fileobj = fp, mode = mode)
    try:
      for item in items:
        assert item
        assert item.arcname
        # fix: pass content by keyword for consistency with every other
        # make_temp_file() call site in this file.
        tmp_content = temp_file.make_temp_file(content = item.content)
        archive.add(tmp_content, arcname = item.arcname)
        file_util.remove(tmp_content)
    finally:
      # fix: close the tar even if add() raises; the `with` already
      # flushes and closes fp, so the old fp.flush()/fp.close() were
      # redundant.
      archive.close()
def create_package(self, adesc, mutations = None):
  '''Create a package for the artifact descriptor (or its string form).

  Raises KeyError if no recipe is registered for the descriptor.  Returns
  the created package's filename.
  '''
  # fix: mutable default argument ({}) replaced with None sentinel.
  mutations = mutations or {}
  if check.is_string(adesc):
    adesc = artifact_descriptor.parse(adesc)
  check.check_artifact_descriptor(adesc)
  key = str(adesc)
  if key not in self._recipes:  # fix: idiomatic `not in` (was `not key in`)
    raise KeyError('recipe not found: %s' % (key))
  recipe = self._recipes[key]
  if mutations:
    recipe = recipe.clone_with_mutations(mutations)
  tmp_file = temp_file.make_temp_file()
  return recipe.create_package(tmp_file, debug = self._debug).filename
def test_cached_filename(self):
  'The cached copy must have identical content but a different path.'
  root = temp_file.make_temp_dir(prefix = 'test_cached_root_', suffix = '.dir', delete = not self.DEBUG)
  source = temp_file.make_temp_file(content = 'foo\n', delete = not self.DEBUG)
  if self.DEBUG:
    print("\ntmp_root_dir: ", root)
    print("tmp_filename: ", source)
  cached = file_cache.cached_filename(source, root_dir = root)
  self.assertEqual( file_util.read(source), file_util.read(cached) )
  self.assertNotEqual( source, cached )
def test_extract_member_to_file(self):
  'Extracting a single member should write exactly its bytes to the target file.'
  assert self.default_archive_type
  entries = [
    ( 'foo/apple.txt', 'apple.txt\n' ),
    ( 'foo/durian.txt', 'durian.txt\n' ),
    ( 'foo/kiwi.txt', 'kiwi.txt\n' ),
    ( 'metadata/db.json', '{}\n' ),
  ]
  archive = self.make_temp_archive_for_reading(temp_archive.make_temp_item_list(entries))
  target = temp_file.make_temp_file()
  archive.extract_member_to_file('foo/apple.txt', target)
  self.assertEqual( b'apple.txt\n', file_util.read(target) )
def test_make_temp_archive_from_file(self):
  'A tgz built from an on-disk file should be a tar (not zip) containing that file.'
  source = temp_file.make_temp_file(content = 'foo.txt\n', suffix = '.foo.txt')
  archive = self._make_temp_archive('tgz', items = [ temp_archive.item('foo.txt', filename = source) ])
  self.assertTrue( path.isfile(archive.filename) )
  self.assertTrue( tarfile.is_tarfile(archive.filename) )
  self.assertFalse( zipfile.is_zipfile(archive.filename) )
  unpack_dir = temp_file.make_temp_dir()
  with tarfile.open(archive.filename, mode = 'r') as tf:
    tf.extractall(path = unpack_dir)
  member = path.join(unpack_dir, 'foo.txt')
  self.assertTrue( path.isfile(member) )
  self.assertEqual( b'foo.txt\n', file_util.read(member) )
def extract(self, dest_dir, base_dir = None, strip_common_ancestor = False, strip_head = None, include = None, exclude = None):
  'Extract the tar archive to dest_dir, honoring include/exclude filters.'
  dest_dir = self._determine_dest_dir(dest_dir, base_dir)
  wanted = self._filter_for_extract(self.members, include, exclude)
  if wanted == self.members:
    # No filtering needed; extract everything.
    self.extract_all(dest_dir, base_dir = base_dir,
                     strip_common_ancestor = strip_common_ancestor,
                     strip_head = strip_head)
    return
  # Selective extract: feed tar a -T manifest of the wanted members.
  manifest = temp_file.make_temp_file(content = '\n'.join(wanted))
  execute.execute('tar xf %s -C %s -T %s' % (self.filename, dest_dir, manifest))
  self._handle_extract_strip_common_ancestor(wanted, strip_common_ancestor, strip_head, dest_dir)
def create(self, root_dir, base_dir = None, extra_items = None, include = None, exclude = None):
  'Create the xz archive from files found under root_dir.'
  self._pre_create()
  entries = self._find(root_dir, base_dir, extra_items, include, exclude)
  # Stage the files under their archive names, then tar with a -T manifest.
  stage_dir = temp_file.make_temp_dir()
  names = []
  for entry in entries:
    file_util.copy(entry.filename, path.join(stage_dir, entry.arcname))
    names.append(entry.arcname)
  manifest = temp_file.make_temp_file(content = '\n'.join(names))
  execute.execute('tar Jcf %s -C %s -T %s' % (self.filename, stage_dir, manifest))
  file_util.remove(stage_dir)
def test_file_is_valid(self):
  'Only a real zip should be valid; tars, tgz, tar.gz and junk should not.'
  def _archive(ext):
    return temp_archive.make_temp_archive([ temp_archive.item('foo.txt', content = 'foo.txt\n') ], ext)
  self.assertTrue( archive_zip.file_is_valid(_archive(archive_extension.ZIP).filename) )
  for bad_ext in ( archive_extension.TAR, archive_extension.TGZ, archive_extension.TAR_GZ ):
    self.assertFalse( archive_zip.file_is_valid(_archive(bad_ext).filename) )
  self.assertFalse( archive_zip.file_is_valid(temp_file.make_temp_file(content = 'junk\n')) )
def create(self, root_dir, base_dir = None, extra_items = None, include = None, exclude = None):
  '''Create the archive from files found under root_dir.

  NOTE(review): the tar flags ('Jcf' = xz) are hard-coded regardless of
  self.filename's extension — confirm this class is xz-only.
  '''
  items = self._find(root_dir, base_dir, extra_items, include, exclude)
  # fix: removed unused locals `ext` and `mode` (archive_extension lookups
  # whose results were never used) and the commented-out debug prints.
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    file_util.copy(item.filename, path.join(tmp_dir, item.arcname))
  manifest_content = '\n'.join([ item.arcname for item in items ])
  manifest = temp_file.make_temp_file(content = manifest_content)
  cmd = 'tar Jcf %s -C %s -T %s' % (self.filename, tmp_dir, manifest)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
def extract_members(self, members, dest_dir, base_dir = None, strip_common_ancestor = False, strip_head = None, include = None, exclude = None):
  'Extract selected members of the dmg by round-tripping through a temp zip.'
  # Cheat by using a temporary zip file to do the actual work. Super innefecient but
  # easy since theres no library to extract just some stuff from dmg files.
  staging = temp_file.make_temp_dir()
  dmg.extract(self.filename, staging)
  zip_path = temp_file.make_temp_file(suffix = '.zip')
  proxy = archive_zip(zip_path)
  proxy.create(staging)
  proxy.extract_members(members, dest_dir,
                        base_dir = base_dir,
                        strip_common_ancestor = strip_common_ancestor,
                        strip_head = strip_head,
                        include = include,
                        exclude = exclude)
  file_util.remove(zip_path)
  file_util.remove(staging)
def test_should_ignore(self):
  'should_ignore() matches the listed patterns and nothing else.'
  # NOTE(review): literal reconstructed from whitespace-mangled source.
  content = '''
# comment
*.txt
*.png
foobar
# comment
'''
  ignore = IFD.read_file(temp_file.make_temp_file(content = content))
  for name in ( 'ppp.txt', 'foo.png', 'foobar' ):
    self.assertTrue( ignore.should_ignore(name) )
  for name in ( 'ppp.pdf', 'foo.jpg', 'foobarx' ):
    self.assertFalse( ignore.should_ignore(name) )
def test_load_checksums_file(self):
  'Loading a checksums file should equal checksums computed from the files.'
  # NOTE(review): JSON literal reconstructed from whitespace-mangled source.
  content = '''\
[
  [
    "a.txt",
    "7bf1c5c4153ecb5364b0c7fcb2e767fadc6880e0c2620b69df56b6bb5429448d"
  ],
  [
    "b.txt",
    "429340c7abf63fe50eb3076d3f1d5d996f3b4ee3067734ae8832129af244653c"
  ]
]'''
  checksums_path = temp_file.make_temp_file(content = content)
  expected = FCL.from_files([ 'a.txt', 'b.txt' ], root_dir = self.data_dir())
  self.assertEqual( expected, FCL.load_checksums_file(checksums_path) )
def make_test_package(clazz, pm, debug = False):
  # Build a fake installable package for tests: two doc files, an executable
  # shell script that echoes "<name>-<build_version>", and a pkg-config .pc
  # file, all staged under <tmp>/files and tarred up with package metadata.
  pkg_config_pc_contnet = clazz.make_pkg_config_pc_content(pm.name, pm.build_version)
  script_content = '#!/bin/bash\necho %s-%s\nexit 0\n' % (pm.name, pm.build_version)
  # '_conflict'-suffixed packages deliberately share file paths with their
  # base package, so the suffix is stripped for filenames.
  name = pm.name.replace('_conflict', '')
  items = [
    clazz.make_temp_item(name, pm.version, '_foo.txt', 'docs'),
    clazz.make_temp_item(name, pm.version, '_bar.txt', 'docs'),
    clazz.make_temp_item(name, pm.version, '_script.sh', 'bin', content = script_content, mode = 0o755),
    temp_item('lib/pkgconfig/%s.pc' % (name), content = pkg_config_pc_contnet)
  ]
  # Temp files/dirs are kept when debug is True so failures can be inspected.
  tmp_stage_dir = temp_file.make_temp_dir(delete = not debug)
  tmp_stage_files_dir = path.join(tmp_stage_dir, 'files')
  temp_file.write_temp_files(tmp_stage_files_dir, items)
  tmp_tarball = temp_file.make_temp_file(prefix = pm.package_descriptor.full_name, suffix = '.tar.gz', delete = not debug)
  package.create_package(tmp_tarball, pm.package_descriptor, pm.build_target, tmp_stage_dir)
  return clazz.test_package(tmp_tarball, pm)
def test_save_checksums_file(self):
  # Save checksums computed from the data files and compare the written
  # bytes against the expected JSON serialization.
  tmp_file = temp_file.make_temp_file()
  a = FCL.from_files([ 'a.txt', 'b.txt' ], root_dir = self.data_dir())
  a.save_checksums_file(tmp_file)
  # NOTE(review): this literal was reconstructed from a whitespace-mangled
  # source; it must match save_checksums_file()'s exact output formatting —
  # verify indentation/newlines against the writer.
  expected = '''\
[
  [
    "a.txt",
    "7bf1c5c4153ecb5364b0c7fcb2e767fadc6880e0c2620b69df56b6bb5429448d"
  ],
  [
    "b.txt",
    "429340c7abf63fe50eb3076d3f1d5d996f3b4ee3067734ae8832129af244653c"
  ]
]'''
  self.assertEqual( expected, file_util.read(tmp_file, codec = 'utf8') )
def _create_package(clazz, tarball_filename, stage_dir, timer):
  'Return the list of files to package. Maybe could do some filtering here. Using find because its faster that bes.fs.file_find.'
  if timer:
    timer.start('create_package - determine manifest')
  manifest_entries = clazz._determine_manifest(stage_dir)
  if timer:
    timer.stop()
  file_util.mkdir(path.dirname(tarball_filename))
  manifest_path = temp_file.make_temp_file(content = '\n'.join(manifest_entries))
  tar_cmd = [ 'tar', 'zcf', tarball_filename, '-C', stage_dir, '-T', manifest_path ]
  if timer:
    timer.start('create_package - execute %s' % (' '.join(tar_cmd)))
  execute.execute(tar_cmd)
  if timer:
    timer.stop()
def test_bes_sudo_editor_py(self):
  # Run the sudo editor script twice against a temp sudoers file and verify:
  # each run appends exactly one tagged NOPASSWD line and the original file
  # mode (0o440) is preserved.
  mode = 0o440
  sudoers_tmp = temp_file.make_temp_file(content = self.__SUDOERS_UBUNTU, delete = not self.DEBUG)
  os.chmod(sudoers_tmp, mode)
  cmd = [
    self._BES_SUDO_EDITOR_PY,
    '--sudoers', sudoers_tmp,
    'chupacabra',
    '/usr/sbin/chroot',
    '1',
  ]
  rv = os_env.call_python_script(cmd)
  if rv.exit_code != 0:
    # fix: corrected 'exectute' typo in the error message.
    raise RuntimeError('Failed to execute "%s": %s' % (' '.join(cmd), rv.stdout))
  # fix: assertEquals is a deprecated alias (removed in Python 3.12);
  # use assertEqual throughout.
  self.assertEqual(0, rv.exit_code)
  self.assertEqual(mode, file_util.mode(sudoers_tmp))
  actual_content = file_util.read(sudoers_tmp, codec = 'utf-8')
  difference = actual_content.replace(self.__SUDOERS_UBUNTU, '').strip()
  self.assertEqual(
    'chupacabra ALL = (root) NOPASSWD: /usr/sbin/chroot # bes_sudo:v1',
    difference)
  cmd = [
    self._BES_SUDO_EDITOR_PY,
    '--sudoers', sudoers_tmp,
    'tjefferson',
    '/bin/cat',
    '1',
  ]
  rv = os_env.call_python_script(cmd)
  if rv.exit_code != 0:
    raise RuntimeError('Failed to execute "%s": %s' % (' '.join(cmd), rv.stdout))
  self.assertEqual(0, rv.exit_code)
  actual_content = file_util.read(sudoers_tmp, codec = 'utf-8')
  difference = actual_content.replace(self.__SUDOERS_UBUNTU, '').strip()
  self.assertEqual(
    'chupacabra ALL = (root) NOPASSWD: /usr/sbin/chroot # bes_sudo:v1\ntjefferson ALL = (root) NOPASSWD: /bin/cat # bes_sudo:v1',
    difference)
def instructions(self, env):
  # Compute environment "instructions" by generating a bash script that
  # snapshots the environment (declare -px) before and after sourcing
  # self.files_abs, then diffing the two snapshots into SET / UNSET /
  # change instructions.
  buf = StringIO()
  buf.write('#!/bin/bash\n')
  buf.write('echo "----1----"\n')
  buf.write('declare -px\n')
  buf.write('echo "----2----"\n')
  for f in self.files_abs:
    buf.write('source \"%s\"\n' % (f))
  buf.write('echo "----3----"\n')
  buf.write('declare -px\n')
  buf.write('echo "----4----"\n')
  script = temp_file.make_temp_file(content = buf.getvalue(), delete = not self.DEBUG)
  if self.DEBUG:
    print('env_dir: script=%s' % (script))
  os.chmod(script, 0o755)
  try:
    rv = execute.execute(script, raise_error = True, shell = True, env = env)
  finally:
    # Always clean up the generated script (unless debugging).
    if not self.DEBUG:
      file_util.remove(script)
  parser = text_line_parser(rv.stdout)
  if rv.stderr:
    raise RuntimeError(rv.stderr)
  # env1: before sourcing; env2: after — delimited by the sentinel markers.
  env1 = self._parse_env_lines(parser.cut_lines('----1----', '----2----'))
  env2 = self._parse_env_lines(parser.cut_lines('----3----', '----4----'))
  delta = self._env_delta(env1, env2)
  instructions = []
  for key in delta.added:
    instructions.append(instruction(key, env2[key], action.SET))
  for key in delta.removed:
    instructions.append(instruction(key, None, action.UNSET))
  for key in delta.changed:
    value1 = env1[key]
    value2 = env2[key]
    for inst in self._determine_change_instructions(key, value1, value2):
      instructions.append(inst)
  # Sort for deterministic output.
  return sorted(instructions, key = lambda x: ( x.key, x.value ) )
def make_temp_archive(clazz, items, extension, delete = True, prefix = None):
  'Create a temp archive of the given items in the format implied by extension.'
  prefix = prefix or 'tmp_'
  assert archive_extension.is_valid_ext(extension)
  ext_type = clazz._determine_type(extension)
  assert ext_type
  out_filename = temp_file.make_temp_file(suffix = '.' + extension, prefix = prefix, delete = False)
  write_mode = archive_extension.write_format(extension)
  # Dispatch to the per-format maker.
  makers = {
    'zip': clazz._make_temp_archive_zip,
    'tar': clazz._make_temp_archive_tar,
    'dmg': clazz._make_temp_archive_dmg,
    'xz': clazz._make_temp_archive_xz,
  }
  maker = makers.get(ext_type, None)
  if maker:
    maker(items, out_filename, write_mode)
  if delete:
    temp_file.atexit_delete(out_filename)
  return clazz.Result(open(out_filename, 'rb'), out_filename)
def __make_test_config(self, content):
  'Write the given config content to a temp file and return its path.'
  config_path = temp_file.make_temp_file(content = content)
  return config_path
def test_not_git_tracked(self):
  'A file outside any git repo has no git root, no tracking and no config.'
  ce = config_env(self.data_dir())
  loose_file = temp_file.make_temp_file(content = 'def foo(): return 666\n')
  info = FI(ce, loose_file)
  self.assertEqual( None, info.git_root )
  self.assertEqual( False, info.git_tracked )
  self.assertEqual( None, info.config )
def test_create_from_binaries(self):
  # Create a jail from a binaries-only config and verify the jail contains
  # each binary plus every dynamic-library dependency (macOS dyld closure).
  self.maxDiff = None
  tmp_dir = temp_file.make_temp_dir(delete = not self.DEBUG)
  # NOTE(review): this config literal was reconstructed from a
  # whitespace-mangled source — confirm whether `binaries:` values were
  # space-separated on one line or listed one per line.
  jail_config_content = '''
[jail]
description: test
binaries: /bin/bash /bin/echo /bin/ls /bin/sh /usr/lib/dyl*
'''
  tmp_jail_config = temp_file.make_temp_file(content = jail_config_content)
  cmd = [
    self.__BES_JAIL_PY,
    'create',
    tmp_dir,
    tmp_jail_config,
  ]
  rv = os_env.call_python_script(cmd)
  print(rv.stdout)
  self.assertEqual(0, rv.exit_code)
  # The expected dyld closure for the requested binaries on the macOS
  # version this fixture was captured on.
  expected_files = sorted([
    '/bin',
    '/bin/bash',
    '/bin/echo',
    '/bin/ls',
    '/bin/sh',
    '/usr',
    '/usr/lib',
    '/usr/lib/dyld',
    '/usr/lib/dylib1.10.5.o',
    '/usr/lib/dylib1.o',
    '/usr/lib/libDiagnosticMessagesClient.dylib',
    '/usr/lib/libSystem.B.dylib',
    '/usr/lib/libauto.dylib',
    '/usr/lib/libc++.1.dylib',
    '/usr/lib/libc++abi.dylib',
    '/usr/lib/libncurses.5.4.dylib',
    '/usr/lib/libobjc.A.dylib',
    '/usr/lib/libutil.dylib',
    '/usr/lib/system',
    '/usr/lib/system/libcache.dylib',
    '/usr/lib/system/libcommonCrypto.dylib',
    '/usr/lib/system/libcompiler_rt.dylib',
    '/usr/lib/system/libcopyfile.dylib',
    '/usr/lib/system/libcorecrypto.dylib',
    '/usr/lib/system/libdispatch.dylib',
    '/usr/lib/system/libdyld.dylib',
    '/usr/lib/system/libkeymgr.dylib',
    '/usr/lib/system/liblaunch.dylib',
    '/usr/lib/system/libmacho.dylib',
    '/usr/lib/system/libquarantine.dylib',
    '/usr/lib/system/libremovefile.dylib',
    '/usr/lib/system/libsystem_asl.dylib',
    '/usr/lib/system/libsystem_blocks.dylib',
    '/usr/lib/system/libsystem_c.dylib',
    '/usr/lib/system/libsystem_configuration.dylib',
    '/usr/lib/system/libsystem_coreservices.dylib',
    '/usr/lib/system/libsystem_coretls.dylib',
    '/usr/lib/system/libsystem_dnssd.dylib',
    '/usr/lib/system/libsystem_info.dylib',
    '/usr/lib/system/libsystem_kernel.dylib',
    '/usr/lib/system/libsystem_m.dylib',
    '/usr/lib/system/libsystem_malloc.dylib',
    '/usr/lib/system/libsystem_network.dylib',
    '/usr/lib/system/libsystem_networkextension.dylib',
    '/usr/lib/system/libsystem_notify.dylib',
    '/usr/lib/system/libsystem_platform.dylib',
    '/usr/lib/system/libsystem_pthread.dylib',
    '/usr/lib/system/libsystem_sandbox.dylib',
    '/usr/lib/system/libsystem_secinit.dylib',
    '/usr/lib/system/libsystem_stats.dylib',
    '/usr/lib/system/libsystem_trace.dylib',
    '/usr/lib/system/libunc.dylib',
    '/usr/lib/system/libunwind.dylib',
    '/usr/lib/system/libxpc.dylib',
  ])
  actual_files = file_find.find(tmp_dir, file_type = file_find.ANY, relative = True)
  # Normalize to absolute-style paths for comparison with expected_files.
  actual_files = [ path.join('/', f) for f in actual_files ]
  self.assertEqual(expected_files, actual_files)
def test_file(self):
  'A freshly made temp file should match the FILE type.'
  plain_file = temp_file.make_temp_file()
  self.assertTrue( file_type.matches(plain_file, file_type.FILE) )
def test_is_package(self):
  'is_package() accepts a real package tarball and rejects arbitrary content.'
  real_package = fake_package_unit_test.create_one_package(self._WATER)
  self.assertTrue( package.is_package(real_package) )
  not_a_package = temp_file.make_temp_file(content = 'notpackage')
  self.assertFalse( package.is_package(not_a_package) )
def test_parse_filename(self):
  'Parsing the FOO_PC fixture should yield the expected variables and exports.'
  pc_path = temp_file.make_temp_file(content = self.FOO_PC)
  parsed = pkg_config_file()
  parsed.parse_file(pc_path)
  self.assertEqual( self.FOO_EXPECTED_VARIABLES, parsed.variables )
  self.assertEqual( self.FOO_EXPECTED_EXPORTS, parsed.exports )
def test_parse_file(self):
  'The classmethod parser should yield the expected variables and properties.'
  pc_path = temp_file.make_temp_file(content = self.FOO_PC)
  parsed = caca_pkg_config_file.parse_file(pc_path)
  self.assertEqual( self.FOO_EXPECTED_VARIABLES, parsed.variables )
  self.assertEqual( self.FOO_EXPECTED_PROPERTIES, parsed.properties )
def make_temp_archive_for_writing(self):
  'Return a writable archive backed by a fresh (undeleted) temp file.'
  backing_file = temp_file.make_temp_file(suffix = '.' + self.default_archive_type, delete = False)
  return self.make_archive(backing_file)
def create_one_package(clazz, recipe, metadata_mutations = None, debug = False):
  '''Parse a single recipe string and create its package.

  Returns the created package's filename.  The temp file is kept when
  debug is True.
  '''
  # fix: mutable default argument ({}) replaced with None sentinel.
  metadata_mutations = metadata_mutations or {}
  recipe = clazz._parse_one_recipe(recipe, metadata_mutations)
  tmp_file = temp_file.make_temp_file(delete = not debug)
  if debug:
    print('tmp_file: %s' % (tmp_file))
  return recipe.create_package(tmp_file, debug = debug).filename
def main():
  # Command-line driver for the bes unit-test runner: resolves test files,
  # optionally builds an egg, runs each test file under one or more python
  # interpreters in a clean environment, and reports results/timings.
  import bes
  vcli = version_cli(bes)
  parser = argparse.ArgumentParser()
  parser.add_argument('files', action = 'store', nargs = '*', help = 'Files or directories to rename')
  vcli.version_add_arguments(parser)
  parser.add_argument('--dry-run', '-n', action = 'store_true', default = False,
                      help = 'Only print what files will get tests [ False ]')
  parser.add_argument('--timing', '-t', action = 'store_true', default = False,
                      help = 'Show the amount of time it takes to run tests [ False ]')
  parser.add_argument('--verbose', '-v', action = 'store_true', default = False,
                      help = 'Verbose debug output [ False ]')
  parser.add_argument('--stop', '-s', action = 'store_true', default = False,
                      help = 'Stop right after the first failure. [ False ]')
  parser.add_argument('--randomize', action = 'store_true', default = False,
                      help = 'Randomize the order in which unit tests run. [ False ]')
  parser.add_argument('--python', action = 'append', default = [],
                      help = 'Python executable) to use. Multiple flags can be used for running with mutiple times with different python versions [ python ]')
  parser.add_argument('--page', '-p', action = 'store_true', default = False,
                      help = 'Page output with $PAGER [ False ]')
  parser.add_argument('--profile', action = 'store', default = None,
                      help = 'Profile the code with cProfile and store the output in the given argument [ None ]')
  parser.add_argument('--coverage', action = 'store', default = None,
                      help = 'Run coverage on the code and store the output in the given argument [ None ]')
  parser.add_argument('--pager', action = 'store', default = os.environ.get('PAGER', 'more'),
                      help = 'Pager to use when paging [ %s ]' % (os.environ.get('PAGER', 'more')))
  # NOTE(review): help text looks copy-pasted from --python.
  parser.add_argument('--iterations', '-i', action = 'store', default = 1, type = int,
                      help = 'Python executable to use [ python ]')
  parser.add_argument('--git', '-g', action = 'store_true', default = False,
                      help = 'Use git status to figure out what has changed to test [ False ]')
  parser.add_argument('--pre-commit', action = 'store_true', default = False,
                      help = 'Run pre commit checks [ False ]')
  parser.add_argument('--print-tests', action = 'store_true', default = False,
                      help = 'Print the list of unit tests [ False ]')
  parser.add_argument('--print-files', action = 'store_true', default = False,
                      help = 'Print the list of unit files [ False ]')
  parser.add_argument('--egg', action = 'store_true', default = False,
                      help = 'Make an egg of the package and run the tests against that instead the live files. [ False ]')
  parser.add_argument('--save-egg', action = 'store_true', default = False,
                      help = 'Save the egg in the current directory. [ False ]')
  parser.add_argument('--ignore', action = 'append', default = [],
                      help = 'Patterns of filenames to ignore []')
  parser.add_argument('--root-dir', action = 'store', default = None,
                      help = 'The root directory for all your projets. By default its computed from your git struture. [ None ]')
  parser.add_argument('--dont-hack-env', action = 'store_true', default = False,
                      help = 'Dont hack PATH and PYTHONPATH. [ False ]')
  parser.add_argument('--compile-only', '-c', action = 'store_true', default = False,
                      help = 'Just compile the files to verify syntax [ False ]')
  parser.add_argument('--print-deps', action = 'store_true', default = False,
                      help = 'Print python dependencies for test files [ False ]')
  parser.add_argument('--print-configs', action = 'store_true', default = False,
                      help = 'Print testing configs found [ False ]')
  parser.add_argument('--print-root-dir', action = 'store_true', default = False,
                      help = 'Print the root dir [ False ]')
  parser.add_argument('--print-path', action = 'store_true', default = False,
                      help = 'Print sys.path [ False ]')
  parser.add_argument('--file-ignore-file', action = 'append', default = [],
                      help = 'List of file ignore files. [ .bes_test_ignore .bes_test_internal_ignore ]')
  parser.add_argument('--env', action = 'append', default = [],
                      help = 'Environment variables to set [ None ]')
  parser.add_argument('--no-env-deps', action = 'store_true', default = False,
                      help = 'Dont use env deps. [ False ]')
  parser.add_argument('--temp-dir', action = 'store', default = None,
                      help = 'The directory to use for tmp files overriding the system default. [ None ]')
  # Sort options alphabetically in --help output (reaches into argparse internals).
  for g in parser._action_groups:
    g._group_actions.sort(key = lambda x: x.dest)
  args = parser.parse_args()
  if args.temp_dir:
    file_util.mkdir(args.temp_dir)
    tempfile.tempdir = args.temp_dir
  if os.environ.get('DEBUG', False):
    args.verbose = True
  cwd = os.getcwd()
  if args.version:
    vcli.version_print_version()
    return 0
  args.env = _parse_args_env(args.env)
  if not args.files:
    args.files = [ cwd ]
  if not args.file_ignore_file:
    args.file_ignore_file = [ '.bes_test_ignore', '.bes_test_internal_ignore' ]
  ar = argument_resolver(cwd, args.files, root_dir = args.root_dir,
                         file_ignore_filename = args.file_ignore_file,
                         check_git = args.git,
                         use_env_deps = not args.no_env_deps)
  ar.num_iterations = args.iterations
  ar.randomize = args.randomize
  ar.ignore_with_patterns(args.ignore)
  if args.compile_only:
    # Syntax-check every resolved file instead of running tests.
    total_files = len(ar.all_files)
    for i, f in enumerate(ar.all_files):
      tmp = temp_file.make_temp_file()
      filename_count_blurb = ' ' + _make_count_blurb(i + 1, total_files)
      short_filename = file_util.remove_head(f, cwd)
      blurb = '%7s:%s %s ' % ('compile', filename_count_blurb, short_filename)
      printer.writeln_name(blurb)
      py_compile.compile(f, cfile = tmp, doraise = True)
    return 0
  if not ar.test_descriptions:
    return 1
  # Informational print-and-exit modes.
  if args.print_path:
    for p in sys.path:
      print(p)
    return 0
  if args.print_configs:
    ar.print_configs()
    return 0
  if args.print_root_dir:
    print(ar.root_dir)
    return 0
  if args.print_files:
    ar.print_files()
    return 0
  if args.print_tests:
    ar.print_tests()
    return 0
  # NOTE(review): `and` binds tighter than `or` — this parses as
  # `print_deps or (pre_commit and not supports...)`; the intent was likely
  # `(print_deps or pre_commit) and not supports...`.  Verify.
  if args.print_deps or args.pre_commit and not ar.supports_test_dependency_files():
    printer.writeln_name('ERROR: Cannot figure out dependencies. snakefood missing.')
    return 1
  if args.print_deps:
    dep_files = ar.test_dependency_files()
    for filename in sorted(dep_files.keys()):
      print(filename)
      for dep_file in dep_files[filename]:
        print('  %s' % (dep_file.filename))
    return 0
  # Start with a clean environment so unit testing can be deterministic and not subject
  # to whatever the user happened to have exported. PYTHONPATH and PATH for dependencies
  # are set below by iterating the configs
  keep_keys = [ 'BES_LOG', 'BES_VERBOSE', 'BESCFG_PATH', 'DEBUG', 'BES_TEMP_DIR' ]
  if args.dont_hack_env:
    keep_keys.extend([ 'PATH', 'PYTHONPATH' ])
  env = os_env.make_clean_env(keep_keys = keep_keys)
  env['PYTHONDONTWRITEBYTECODE'] = 'x'
  variables = {
    'rebuild_dir': path.expanduser('~/.rebuild'),
    'system': host.SYSTEM,
  }
  if not args.dont_hack_env:
    ar.update_environment(env, variables)
  # Update env with whatever was given in --env
  env.update(args.env)
  num_passed = 0
  num_failed = 0
  num_executed = 0
  num_tests = len(ar.test_descriptions)
  failed_tests = []
  # Remove current dir from sys.path to avoid side effects
  if cwd in sys.path:
    sys.path.remove(cwd)
  if args.egg:
    # Build an egg per dependency config and run tests against it instead
    # of the live source tree.
    pythonpath = env_var(env, 'PYTHONPATH')
    pythonpath.remove(cwd)
    for config in ar.env_dependencies_configs:
      setup_dot_py = path.join(config.root_dir, 'setup.py')
      if not path.isfile(setup_dot_py):
        raise RuntimeError('No setup.py found in %s to make the egg.' % (cwd))
      egg_zip = egg.make(setup_dot_py)
      pythonpath.prepend(egg_zip)
      printer.writeln_name('using tmp egg: %s' % (egg_zip))
      if args.save_egg:
        file_util.copy(egg_zip, path.join(cwd, path.basename(egg_zip)))
  if args.pre_commit:
    # Fail if any config-tracked dependency file is not tracked by git.
    missing_from_git = []
    for filename, dep_files in ar.test_dependency_files().items():
      for dep_file in dep_files:
        if dep_file.config and not dep_file.git_tracked:
          missing_from_git.append(dep_file.filename)
    if missing_from_git:
      for f in missing_from_git:
        printer.writeln_name('PRE_COMMIT: missing from git: %s' % (path.relpath(f)))
      return 1
    return 0
  ar.cleanup_python_compiled_files()
  # Do all our work with a temporary working directory to be able to check for side effects
  tmp_cwd = temp_file.make_temp_dir(prefix = 'bes_test_', suffix = '.tmp.dir', delete = False)
  os.chdir(tmp_cwd)
  # Use what the OS thinks the path is (to deal with symlinks and virtual tmpfs things)
  tmp_cwd = os.getcwd()
  if not args.dry_run and args.page:
    printer.OUTPUT = tempfile.NamedTemporaryFile(prefix = 'bes_test', delete = True, mode = 'w')
  total_tests = _count_tests(ar.inspect_map, ar.test_descriptions)
  total_files = len(ar.test_descriptions)
  total_num_tests = 0
  if not args.python:
    args.python = [ 'python' ]
  if args.profile:
    args.profile = path.abspath(args.profile)
    if not _check_program('cprofilev'):
      return 1
  if args.coverage:
    # Coverage replaces the interpreter list with the coverage executable.
    args.coverage = path.abspath(args.coverage)
    coverage_exe = _check_program('coverage')
    if not coverage_exe:
      return 1
    args.python = [ coverage_exe ]
  if args.profile and args.coverage:
    printer.writeln_name('ERROR: --profile and --coverage are mutually exclusive.')
    return 1
  options = test_options(args.dry_run, args.verbose, args.stop, args.timing,
                         args.profile, args.coverage, args.python, args.temp_dir)
  timings = {}
  total_time_start = time.time()
  stopped = False
  # Main test loop: each test file under each configured interpreter.
  for i, test_desc in enumerate(ar.test_descriptions):
    file_info = test_desc.file_info
    filename = file_info.filename
    if not filename in timings:
      timings[filename] = []
    for python_exe in args.python:
      result = _test_execute(python_exe, ar.inspect_map, filename, test_desc.tests,
                             options, i + 1, total_files, cwd, env)
      timings[filename].append(result.elapsed_time)
      total_num_tests += result.num_tests_run
      num_executed += 1
      if result.success:
        num_passed += 1
      else:
        num_failed += 1
        failed_tests.append(( python_exe, filename, result ))
      if args.stop and not result.success:
        stopped = True
    if stopped:
      break
  total_elapsed_time = 1000 * (time.time() - total_time_start)
  if args.dry_run:
    return 0
  num_skipped = num_tests - num_executed
  # Build the one-line summary: fixtures failed / passed / skipped.
  summary_parts = []
  if total_num_tests == total_tests:
    function_summary = '(%d %s)' % (total_tests, _make_test_string(total_tests))
  else:
    function_summary = '(%d of %d %s)' % (total_num_tests, total_tests, _make_test_string(total_tests))
  if num_failed > 0:
    summary_parts.append('%d of %d fixtures FAILED' % (num_failed, num_tests))
  summary_parts.append('%d of %d passed %s' % (num_passed, num_tests, function_summary))
  if num_skipped > 0:
    summary_parts.append('%d of %d skipped' % (num_skipped, num_tests))
  summary = '; '.join(summary_parts)
  printer.writeln_name('%s' % (summary))
  if failed_tests:
    # Per-failure detail lines, right-aligning the interpreter name when
    # more than one interpreter was used.
    longest_python_exe = max([len(path.basename(p)) for p in options.interpreters])
    for python_exe, filename, result in failed_tests:
      if len(options.interpreters) > 1:
        python_exe_blurb = path.basename(python_exe).rjust(longest_python_exe)
      else:
        python_exe_blurb = ''
      error_status = unit_test_output.error_status(result.output)
      for error in error_status.errors:
        printer.writeln_name('%5s: %s %s :%s.%s' % (error.error_type,
                                                    python_exe_blurb,
                                                    file_util.remove_head(filename, cwd),
                                                    error.fixture,
                                                    error.function))
  if num_failed > 0:
    rv = 1
  else:
    rv = 0
  if args.timing:
    # Per-file average timing report.
    filenames = sorted(timings.keys())
    num_filenames = len(filenames)
    for i, filename in zip(range(0, num_filenames), filenames):
      short_filename = file_util.remove_head(filename, cwd)
      all_timings = timings[filename]
      num_timings = len(all_timings)
      avg_ms = _timing_average(all_timings) * 1000.0
      if num_timings > 1:
        run_blurb = '(average of %d runs)' % (num_timings)
      else:
        run_blurb = ''
      if num_filenames > 1:
        count_blurb = '[%s of %s] ' % (i + 1, num_filenames)
      else:
        count_blurb = ''
      printer.writeln_name('timing: %s%s - %2.2f ms %s' % (count_blurb, short_filename, avg_ms, run_blurb))
  if total_elapsed_time >= 1000.0:
    printer.writeln_name('total time: %2.2f s' % (total_elapsed_time / 1000.0))
  else:
    printer.writeln_name('total time: %2.2f ms' % (total_elapsed_time))
  if args.page:
    subprocess.call([ args.pager, printer.OUTPUT.name ])
  # Side-effect detection: report cwd changes and files dropped in the temp cwd.
  current_cwd = os.getcwd()
  if current_cwd != tmp_cwd:
    printer.writeln_name('SIDE EFFECT: working directory was changed from %s to %s' % (tmp_cwd, current_cwd))
  droppings = file_find.find(current_cwd, relative = False, file_type = file_find.ANY)
  for dropping in droppings:
    printer.writeln_name('SIDE EFFECT: dropping found: %s' % (dropping))
  if not droppings:
    # Only remove the temp cwd when it is clean, so droppings stay inspectable.
    os.chdir('/tmp')
    file_util.remove(tmp_cwd)
  return rv
def extract_member_to_string(self, member):
  '''Extract a single archive member and return its contents as a string.

  A temporary file is used as the extraction target and is always removed,
  even when extraction or reading fails.  (The original implementation
  leaked the temp file if extract_member_to_file() or file_util.read()
  raised.)

  :param member: archive member name to extract.
  :return: the member contents, as returned by file_util.read().
  '''
  tmp_file = temp_file.make_temp_file()
  try:
    self.extract_member_to_file(member, tmp_file)
    return file_util.read(tmp_file)
  finally:
    # Cleanup is unconditional so failed extractions do not accumulate
    # droppings in the temp directory.
    file_util.remove(tmp_file)
def test_create_package(self):
  '''End-to-end check that create_package() builds a tarball with exactly
  the expected members for a recipe containing scripts, C programs, and
  static/shared C libraries with headers.

  NOTE(review): the recipe's line breaks and indentation below were
  reconstructed from a whitespace-collapsed source — confirm against the
  recipe parser's expected layout.
  '''
  # Recipe for a fake package "knife" 6.6.6 targeting linux/x86_64.
  # "\#" escapes a literal '#' so the recipe parser does not treat the
  # line as a comment; "\\n" yields a literal \n inside the C sources.
  recipe = '''
fake_package knife 6.6.6 0 0 linux release x86_64 ubuntu 18

  files
    bin/cut.sh
      \#!/bin/sh
      echo cut.sh: ${1+"$@"} ; exit 0

  c_programs
    bin/cut.exe
      sources
        main.c
          \#include <stdio.h>
          int main(int argc, char* argv[]) {
            char** arg;
            if (argc < 2) {
              fprintf(stderr, "Usage: cut.exe args\\n");
              return 1;
            }
            fprintf(stdout, "cut.exe: ");
            for(arg = argv + 1; *arg != NULL; ++arg) {
              fprintf(stdout, "%s ", *arg);
            }
            fprintf(stdout, "\\n");
            return 0;
          }
    bin/links_with_static.exe
      sources
        main.c
          \#include <libfoo_static.h>
          \#include <stdio.h>
          int main() {
            printf("%d\\n", foo_static(10));
            return 0;
          }
      ldflags
        -lfoo_static
    bin/links_with_shared.exe
      sources
        main.c
          \#include <libfoo_shared.h>
          \#include <stdio.h>
          int main() {
            printf("%d\\n", foo_shared(20));
            return 0;
          }
      ldflags
        -lfoo_shared

  static_c_libs
    lib/libfoo_static.a
      sources
        foo.c
          \#include <libfoo_static.h>
          const int FOO_STATIC_MAGIC_NUMBER = 1;
          int foo_static(int x) {
            return x + FOO_STATIC_MAGIC_NUMBER;
          }
      headers
        include/libfoo_static.h
          \#ifndef __FOO_STATIC_H__
          \#define __FOO_STATIC_H__
          extern const int FOO_STATIC_MAGIC_NUMBER;
          extern int foo_static(int x);
          \#endif /* __FOO_STATIC_H__ */

  shared_c_libs
    lib/libfoo_shared.so
      sources
        foo2.c
          \#include <libfoo_shared.h>
          int foo_shared(int x) {
            return x + FOO_SHARED_MAGIC_NUMBER;
          }
      headers
        include/libfoo_shared.h
          \#ifndef __FOO_SHARED_H__
          \#define __FOO_SHARED_H__
          \#define FOO_SHARED_MAGIC_NUMBER 2
          extern int foo_shared(int x);
          \#endif /* __FOO_SHARED_H__ */
'''
  # The tmp archive is kept around when self.DEBUG is set, for inspection.
  tmp = temp_file.make_temp_file(suffix='.tar.gz', delete=not self.DEBUG)
  filename, metadata = self._parse(recipe)[0].create_package(tmp, debug=self.DEBUG)
  if self.DEBUG:
    print('tmp:\n%s' % (tmp))
  # Assert that the package has exactly the members expected
  self.assertEqual([
    'files/bin/cut.exe',
    'files/bin/cut.sh',
    'files/bin/links_with_shared.exe',
    'files/bin/links_with_static.exe',
    'files/include/libfoo_shared.h',
    'files/include/libfoo_static.h',
    'files/lib/libfoo_shared.so',
    'files/lib/libfoo_static.a',
    'metadata/metadata.json',
  ], archiver.members(tmp))
  # Opening the archive as a package strips the 'files/' prefix and hides
  # the metadata member; only the payload files should be listed.
  p = package(tmp)
  self.assertEqual([
    'bin/cut.exe',
    'bin/cut.sh',
    'bin/links_with_shared.exe',
    'bin/links_with_static.exe',
    'include/libfoo_shared.h',
    'include/libfoo_static.h',
    'lib/libfoo_shared.so',
    'lib/libfoo_static.a',
  ], p.files)