def test_cached_filename(self):
  """cached_filename() should copy the file to a new path with identical content."""
  root = temp_file.make_temp_dir(prefix = 'test_cached_root_', suffix = '.dir', delete = not self.DEBUG)
  src = temp_file.make_temp_file(content = 'foo\n', delete = not self.DEBUG)
  if self.DEBUG:
    print("\ntmp_root_dir: ", root)
    print("tmp_filename: ", src)
  expected = file_util.read(src)
  cached = file_cache.cached_filename(src, root_dir = root)
  actual = file_util.read(cached)
  self.assertEqual( expected, actual )
  # the cache must not hand back the original path
  self.assertNotEqual( src, cached )
def extract(self, where):
  """Write every bundled data file into `where` and verify each write round-trips."""
  for name in self._FILES:
    payload = pkgutil.get_data('bes', name)
    target = path.join(where, name)
    file_util.save(target, content = payload, mode = 0o755)
    # read back to confirm the bytes landed intact
    if file_util.read(target) != payload:
      raise RuntimeError('Failed to save %s to %s.' % (name, target))
def test_patch(self):
  """Applying the patch to a copy of src should produce dst and leave a .orig backup."""
  patch_file = self.data_path('src_to_dst.patch')
  src = self.data_path('src.txt')
  dst = self.data_path('dst.txt')
  work_dir = temp_file.make_temp_dir()
  work_src = path.join(work_dir, 'src.txt')
  backup = work_src + '.orig'
  file_util.copy(src, work_src)
  patch.patch(patch_file, cwd=work_dir, strip=0, backup=True, posix=True)
  # patched copy matches dst; untouched original preserved in the backup
  self.assertEqual(file_util.read(dst), file_util.read(work_src))
  self.assertTrue(path.exists(backup))
  self.assertEqual(file_util.read(src), file_util.read(backup))
def parse_file(self, filename):
  """Reset parser state, then parse `filename` (read as UTF-8 text).

  On failure, reports which file failed and re-raises the original exception.
  """
  self.__reset()
  try:
    return self.parse_string(file_util.read(filename).decode('utf-8'))
  except Exception:
    # was a bare `except:` which also intercepted SystemExit/KeyboardInterrupt;
    # catch Exception so only real errors get the diagnostic treatment.
    print("failed loading %s" % (filename))
    raise
def test_cached_content(self):
  """cached_content() should return exactly the source file's bytes."""
  root = temp_file.make_temp_dir(prefix = 'test_cached_root_', suffix = '.dir', delete = not self.DEBUG)
  src = temp_file.make_temp_file(content = 'foo\n', delete = not self.DEBUG)
  expected = file_util.read(src)
  actual = file_cache.cached_content(src, root_dir = root)
  self.assertEqual( expected, actual )
def __new__(clazz, filename):
  """Construct an instance by reading and parsing the config file `filename`.

  root_dir is the parent of the file's directory.
  """
  filename = path.abspath(filename)
  check.check_string(filename)
  file_check.check_file(filename)
  text = file_util.read(filename, codec = 'utf-8')
  data = config_data.parse(text, filename = filename)
  root_dir = path.normpath(path.join(path.dirname(filename), '..'))
  # delegate allocation to the (namedtuple-like) base class
  return clazz.__bases__[0].__new__(clazz, root_dir, filename, data)
def test_bes_sudo_editor_py(self):
  """End-to-end check of the sudo editor script: two successive invocations
  should append one tagged NOPASSWD line each and preserve the file mode.
  """
  mode = 0o440
  sudoers_tmp = temp_file.make_temp_file(content=self.__SUDOERS_UBUNTU, delete=not self.DEBUG)
  os.chmod(sudoers_tmp, mode)
  cmd = [
    self._BES_SUDO_EDITOR_PY,
    '--sudoers', sudoers_tmp,
    'chupacabra',
    '/usr/sbin/chroot',
    '1',
  ]
  rv = os_env.call_python_script(cmd)
  if rv.exit_code != 0:
    # fixed typo: "exectute" -> "execute"
    raise RuntimeError('Failed to execute "%s": %s' % (' '.join(cmd), rv.stdout))
  # assertEquals is a deprecated alias; use assertEqual
  self.assertEqual(0, rv.exit_code)
  self.assertEqual(mode, file_util.mode(sudoers_tmp))
  actual_content = file_util.read(sudoers_tmp, codec='utf-8')
  # strip the original content to isolate what the script appended
  difference = actual_content.replace(self.__SUDOERS_UBUNTU, '').strip()
  self.assertEqual(
    'chupacabra ALL = (root) NOPASSWD: /usr/sbin/chroot # bes_sudo:v1',
    difference)
  cmd = [
    self._BES_SUDO_EDITOR_PY,
    '--sudoers', sudoers_tmp,
    'tjefferson',
    '/bin/cat',
    '1',
  ]
  rv = os_env.call_python_script(cmd)
  if rv.exit_code != 0:
    raise RuntimeError('Failed to execute "%s": %s' % (' '.join(cmd), rv.stdout))
  self.assertEqual(0, rv.exit_code)
  actual_content = file_util.read(sudoers_tmp, codec='utf-8')
  difference = actual_content.replace(self.__SUDOERS_UBUNTU, '').strip()
  self.assertEqual(
    'chupacabra ALL = (root) NOPASSWD: /usr/sbin/chroot # bes_sudo:v1\ntjefferson ALL = (root) NOPASSWD: /bin/cat # bes_sudo:v1',
    difference)
def write_file(self, filename, backup=True):
  """Write str(self) to `filename`, optionally backing up an existing file.

  Returns True if the file was written, False if it already had the
  desired content.
  """
  new_content = str(self)
  if path.exists(filename):
    # Read as text: elsewhere in this codebase file_util.read() without a
    # codec returns bytes, so comparing against the str new_content was
    # always False and the file was rewritten (and backed up) every call.
    old_content = file_util.read(filename, codec = 'utf-8')
    if new_content == old_content:
      return False
    if backup:
      file_util.backup(filename)
  file_util.save(filename, new_content)
  return True
def __init__(self, arcname, content = None, filename = None):
  """Archive item: exactly one of `content` or `filename` must be given.

  When `filename` is given, the file's content is loaded eagerly.
  """
  # Compare against None explicitly: the previous truthiness checks treated
  # empty content ('') as "not provided", rejecting a legitimate empty item.
  assert content is not None or filename is not None
  if content is not None:
    assert filename is None
  if filename is not None:
    assert content is None
  self.arcname = arcname
  self.content = content
  self.filename = filename
  if self.filename is not None:
    self.content = file_util.read(self.filename)
def read_easy_install_pth(clazz, filename):
  """Parse an easy-install.pth file and return its egg paths, normalized.

  The file must have 'import sys' sentinel lines first and last; everything
  between them is an egg path.
  """
  raw = file_util.read(filename).strip()
  lines = raw.decode('utf-8').split('\n')
  if len(lines) < 2:
    raise RuntimeError('Invalid easy-install.pth(1): %s' % (filename))
  if not lines[0].startswith('import sys'):
    raise RuntimeError('Invalid easy-install.pth(2): %s' % (filename))
  if not lines[-1].startswith('import sys'):
    raise RuntimeError('Invalid easy-install.pth(3): %s' % (filename))
  return [ path.normpath(egg) for egg in lines[1:-1] ]
def inspect_file_old(clazz, filename):
  """Return sorted unit_test_description entries for every test_* method
  found in unit-test classes of the python file `filename`.
  """
  code = file_util.read(filename)
  tree = ast.parse(code, filename = filename)
  # removed dead code: an ast.dump() of the whole tree was computed with
  # full attributes and never used.
  result = []
  for node in tree.body:
    if not clazz._node_is_unit_test_class(node):
      continue
    for statement in node.body:
      if isinstance(statement, ast.FunctionDef) and statement.name.startswith('test_'):
        result.append(unit_test_description(filename, node.name, statement.name))
  return sorted(result, key = lambda x: x.function)
def test_extract_member_to_file(self):
  """extract_member_to_file() should write exactly the member's bytes."""
  assert self.default_archive_type
  items = temp_archive.make_temp_item_list([
    ( 'foo/apple.txt', 'apple.txt\n' ),
    ( 'foo/durian.txt', 'durian.txt\n' ),
    ( 'foo/kiwi.txt', 'kiwi.txt\n' ),
    ( 'metadata/db.json', '{}\n' ),
  ])
  archive = self.make_temp_archive_for_reading(items)
  dest = temp_file.make_temp_file()
  archive.extract_member_to_file('foo/apple.txt', dest)
  self.assertEqual( b'apple.txt\n', file_util.read(dest) )
def test_make_temp_archive_from_file(self):
  """A tgz archive built from an on-disk file should round-trip its content."""
  src_file = temp_file.make_temp_file(content = 'foo.txt\n', suffix = '.foo.txt')
  archive = self._make_temp_archive('tgz', items = [ temp_archive.item('foo.txt', filename = src_file) ])
  self.assertTrue( path.isfile(archive.filename) )
  # must be a tarball, and specifically not a zip
  self.assertTrue( tarfile.is_tarfile(archive.filename) )
  self.assertFalse( zipfile.is_zipfile(archive.filename) )
  unpack_dir = temp_file.make_temp_dir()
  with tarfile.open(archive.filename, mode = 'r') as tf:
    tf.extractall(path = unpack_dir)
  member_path = path.join(unpack_dir, 'foo.txt')
  self.assertTrue( path.isfile(member_path) )
  self.assertEqual( b'foo.txt\n', file_util.read(member_path) )
def update_site_dot_py(clazz, d):
  """Ensure `d`/site.py exists with exactly SITE_DOT_PY_CONTENT (mode 0644).

  No-op if `d` does not exist or the file is already up to date.
  Raises RuntimeError if `d` is not a directory or site.py is not a
  regular file.
  """
  if not path.exists(d):
    return
  if not path.isdir(d):
    raise RuntimeError('Not a directory: %s' % (d))
  site_py_path = path.join(d, clazz.SITE_DOT_PY_FILENAME)
  if path.exists(site_py_path):
    if not path.isfile(site_py_path):
      raise RuntimeError('Not a regular file: %s' % (site_py_path))
    # Decode so the comparison is str-vs-str: file_util.read() returns bytes
    # by default, which never compared equal to the str SITE_DOT_PY_CONTENT,
    # so the file was rewritten on every call.
    old_content = file_util.read(site_py_path, codec = 'utf-8')
    if old_content == clazz.SITE_DOT_PY_CONTENT:
      return
  file_util.save(site_py_path, content = clazz.SITE_DOT_PY_CONTENT, mode = 0o644)
def _load_db_local(self):
  """Load the local source-finder db, returning an empty db when the file
  is missing or cannot be parsed.
  """
  db_path = self._local_db_file_path
  if not path.isfile(db_path):
    self.blurb('pcloud: not local db found at: %s' % (db_path))
    return source_finder_db_dict()
  try:
    self.blurb('pcloud: using local db: %s' % (path.relpath(db_path)))
    return source_finder_db_dict.from_json(file_util.read(db_path))
  except Exception:
    # best effort: a corrupt db is reported and replaced by an empty one
    self.blurb('pcloud: local db is corrupt: %s' % (db_path))
    return source_finder_db_dict()
def test_save_executable(self):
  """save() should substitute template variables and write an executable script."""
  template = '#!/bin/bash\necho @FOO@ @BAR@'
  basename = 'foo.sh'
  script = manager_script(template, basename)
  root = temp_file.make_temp_dir()
  variables = {
    '@FOO@': 'foo',
    '@BAR@': 'bar',
  }
  save_rv = script.save(root, variables)
  saved = path.join(root, basename)
  self.assertTrue(path.exists(saved))
  self.assertEqual('#!/bin/bash\necho foo bar', file_util.read(saved, 'utf-8'))
  # scripts must be written executable
  self.assertEqual(file_util.mode(saved), 0o755)
def test_save_checksums_file(self):
  # Verify save_checksums_file() writes the checksum list as JSON text.
  tmp_file = temp_file.make_temp_file()
  a = FCL.from_files([ 'a.txt', 'b.txt' ], root_dir = self.data_dir())
  a.save_checksums_file(tmp_file)
  # NOTE(review): this expected literal appears whitespace-collapsed (it was
  # presumably a multi-line '''\ ... ''' block originally) -- confirm the
  # exact formatting against version control before relying on it.
  expected = '''\ [ [ "a.txt", "7bf1c5c4153ecb5364b0c7fcb2e767fadc6880e0c2620b69df56b6bb5429448d" ], [ "b.txt", "429340c7abf63fe50eb3076d3f1d5d996f3b4ee3067734ae8832129af244653c" ] ]'''
  self.assertEqual( expected, file_util.read(tmp_file, codec = 'utf8') )
def get_data_content(clazz, data_path, filename, module_name):
  """Resolve and return package data content, or None on any failure.

  Best-effort API: all exceptions are reported to stderr and swallowed.
  """
  try:
    inside_egg, data = clazz._resolve_data(data_path, filename, module_name)
    if not inside_egg:
      # data is a filesystem path; read and validate it
      if not path.isfile(data):
        raise RuntimeError('Not a file: %s' % (data))
      result = file_util.read(data)
      if not result:
        raise RuntimeError('Failed to read: %s' % (data))
      return result
    # inside an egg, data is already the content
    return data
  except Exception as ex:
    sys.stderr.write('package: caught exception: %s\n' % (str(ex)))
    # fixed: previously flushed stdout after writing to stderr
    sys.stderr.flush()
    return None
def main():
  """Parse a platforms.txt file and write the host infos as a python list
  to the output file.  Returns 0 on success.
  """
  ap = argparse.ArgumentParser()
  ap.add_argument('filename', type = str, action = 'store',
                  help = 'The platforms.txt file.')
  # fixed copy-pasted help text (previously also said "platforms.txt file")
  ap.add_argument('output', type = str, action = 'store',
                  help = 'The output file for the host info python list.')
  args = ap.parse_args()
  content = file_util.read(args.filename)
  lines = TLP.parse_lines(content, strip_comments = True, strip_text = True, remove_empties = True)
  infos = parse_host_infos(lines)
  s = make_host_info_python_list(infos)
  file_util.save(args.output, content = s)
  return 0
def main():
  '''Generate the test package tarballs (and their pkg-config files) used by
  the dependency unit tests.  Presumably run from the directory containing
  the template projects -- everything is resolved relative to os.getcwd().
  '''
  root = os.getcwd()
  make_template_tarball(root, 'template', '1.0.0')
  make_template_tarball(root, 'templatedepends', '1.2.3')
  # ( package-name-version, template name, template version, extra replacements )
  # replacement values of the form 'file:PATH' are resolved to file content below.
  PACKAGES = [
    ( 'fructose-3.4.5-6', 'template', '1.0.0', {} ),
    ( 'mercury-1.2.8-0', 'template', '1.0.0', {} ),
    ( 'arsenic-1.2.9-0', 'template', '1.0.0', {} ),
    ( 'fiber-1.0.0-0', 'template', '1.0.0', {} ),
    ( 'water-1.0.0-0', 'template', '1.0.0', {} ),
    ( 'fruit-1.0.0', 'templatedepends', '1.2.3', {
      # '#@REB_20@': 'PKG_CHECK_MODULES([CACA], [caca])',
      '/*@fruit1_dot_c@*/': 'file:template/code/fruit/fruit1.c',
      '/*@fruit1_dot_h@*/': 'file:template/code/fruit/fruit1.h',
      '/*@fruit2_dot_c@*/': 'file:template/code/fruit/fruit2.c',
      '/*@fruit2_dot_h@*/': 'file:template/code/fruit/fruit2.h',
    } ),
    ( 'pear-1.2.3-1', 'templatedepends', '1.2.3', {
      '/*@pear1_dot_c@*/': 'file:template/code/pear/pear1.c',
      '/*@pear1_dot_h@*/': 'file:template/code/pear/pear1.h',
      '/*@pear2_dot_c@*/': 'file:template/code/pear/pear2.c',
      '/*@pear2_dot_h@*/': 'file:template/code/pear/pear2.h',
    } ),
    ( 'orange-6.5.4-3', 'templatedepends', '1.2.3', {
      '/*@orange1_dot_c@*/': 'file:template/code/orange/orange1.c',
      '/*@orange1_dot_h@*/': 'file:template/code/orange/orange1.h',
      '/*@orange2_dot_c@*/': 'file:template/code/orange/orange2.c',
      '/*@orange2_dot_h@*/': 'file:template/code/orange/orange2.h',
    } ),
    ( 'apple-1.2.3-1', 'templatedepends', '1.2.3', {
      '/*@smoothie1_dot_c@*/': 'file:template/code/smoothie/smoothie1.c',
      '/*@smoothie1_dot_h@*/': 'file:template/code/smoothie/smoothie1.h',
      '/*@smoothie2_dot_c@*/': 'file:template/code/smoothie/smoothie2.c',
      '/*@smoothie2_dot_h@*/': 'file:template/code/smoothie/smoothie2.h',
    } ),
  ]
  # NOTE(review): xPACKAGES is never referenced below (dead code) -- kept as-is.
  xPACKAGES = [
    ( 'pear-1.2.3-1', 'templatedepends', '1.2.3', {
      '/*@pear1_dot_c@*/': 'file:template/code/pear/pear1.c',
      '/*@pear1_dot_h@*/': 'file:template/code/pear/pear1.h',
      '/*@pear2_dot_c@*/': 'file:template/code/pear/pear2.c',
      '/*@pear2_dot_h@*/': 'file:template/code/pear/pear2.h',
    } ),
  ]
  # Resolve 'file:...' replacement values to the referenced file's content.
  # (reassigning values of existing keys while iterating items() is safe)
  for _, _, _, more_replacements in PACKAGES:
    for key, value in more_replacements.items():
      if value.startswith('file:'):
        filename = value.partition(':')[2]
        more_replacements[key] = file_util.read(filename)
  pc_files_dir = path.join(root, '../pkg_config/dependency_tests')
  for package, template_name, template_version, more_replacements in PACKAGES:
    template_tarball = path.join(root, '%s-%s.tar.gz' % (template_name, template_version))
    tmp_dir = temp_file.make_temp_dir(delete = not DEBUG)
    if DEBUG:
      print('DEBUG1: tmp_dir=%s' % (tmp_dir))
    # NOTE(review): leftover debug prints below ('F**K: ...') -- consider removing.
    print('F**K: %s' % (package))
    desc = unit_test_packages.TEST_PACKAGES[package]
    print('desc: %s' % (str(desc)))
    pi = desc #desc['package_info']
    version_no_revision = '%s-%s' % (pi.name, pi.version)
    # unpack the template project into tmp_dir/foo
    archiver.extract(template_tarball, tmp_dir, base_dir = 'foo', strip_common_ancestor = True)
    working_dir = path.join(tmp_dir, 'foo')
    if DEBUG:
      print('working_dir=%s' % (working_dir))
    # rename template identifiers to the package name throughout the tree
    refactor_files.refactor(template_name, pi.name, [ working_dir ])
    # the files that receive token replacements
    file_paths = [
      path.join(working_dir, 'configure.ac'),
      path.join(working_dir, 'libs/%s1/%s1.c' % (pi.name, pi.name)),
      path.join(working_dir, 'libs/%s1/%s1.h' % (pi.name, pi.name)),
      path.join(working_dir, 'libs/%s2/%s2.c' % (pi.name, pi.name)),
      path.join(working_dir, 'libs/%s2/%s2.h' % (pi.name, pi.name)),
    ]
    default_replacements = make_default_replacements(DEFAULT_REPLACEMENTS, pi.name)
    print('F**K: %s' % (default_replacements))
    # layered replacements: defaults, then version bump, then per-package extras
    replacements = {}
    replacements.update(default_replacements)
    replacements.update({ '[%s]' % (template_version): '[%s]' % (pi.version) })
    replacements.update(more_replacements)
    for k, v in sorted(replacements.items()):
      print('REPLACEMENTS: %s: %s' % (k, v))
    for f in file_paths:
      file_replace.replace(f, replacements, backup = False)
    # autotools dance: regenerate build files, configure and make a dist tarball
    command = [
      'cd %s' % (working_dir),
      'automake -a',
      'autoconf',
      './configure',
      'make dist',
      'cp %s.tar.gz %s' % (version_no_revision, root),
    ]
    env = os_env.make_clean_env(keep_keys = [ 'PATH', 'PKG_CONFIG_PATH' ])
    # GZIP=-n: omit timestamps so the tarballs are reproducible
    env['GZIP'] = '-n'
    flat_command = ' && '.join(command)
    execute.execute(flat_command, shell = True, non_blocking = True, env = env)
    # copy any generated pkg-config files next to the other dependency tests
    pc_files = file_find.find_fnmatch(working_dir, [ '*.pc' ], relative = False)
    for pc_file in pc_files:
      dst_pc_file = path.join(pc_files_dir, path.basename(pc_file))
      file_util.copy(pc_file, dst_pc_file)
def main(self):
  '''Entry point for the rebuild CLI: parse arguments, assemble a
  builder_config and run the build.  Returns a process exit code.
  '''
  args = self.parser.parse_args()
  bt = self.build_target_resolve(args)
  args.verbose = bool(args.verbose)
  if args.change_dir:
    os.chdir(args.change_dir)
  target_packages = args.target_packages[0]
  available_packages = self.load_project_file(args.project_file)
  if args.filter:
    # --filter is either a file (one package per line) or a
    # comma-separated list given directly on the command line.
    if path.isfile(args.filter[0]):
      target_packages_filter = file_util.read(args.filter[0]).split('\n')
    else:
      target_packages_filter = args.filter[0].split(',')
    # drop empty entries (trailing newline / stray commas)
    target_packages_filter = [p for p in target_packages_filter if p]
    available_packages = self._filter_target_packages(available_packages, target_packages_filter)
  args.system = build_system.parse_system(args.system)
  args.level = build_level.parse_level(args.level)
  args.arch = build_arch.parse_arch(args.arch, args.system, args.distro)
  build_blurb.set_process_name('rebuild')
  build_blurb.set_verbose(args.verbose)
  # Tests only run on desktop
  if not bt.is_desktop():
    args.skip_tests = True
  # mutually exclusive flags
  if args.download_only and args.no_network:
    build_blurb.blurb('rebuild', 'Only one of --download-only and --no-net can be given.')
    return 1
  # copy parsed args into the builder configuration
  config = builder_config()
  # NOTE(review): path.abspath() is idempotent; the nested call is redundant.
  config.build_root = path.abspath(path.abspath(args.root))
  config.build_target = bt
  config.deps_only = args.deps_only
  config.recipes_only = args.recipes_only
  config.disabled = args.disabled
  config.keep_going = args.keep_going
  config.no_checksums = args.no_checksums
  config.no_network = args.no_network
  config.skip_tests = args.skip_tests
  config.skip_to_step = args.skip_to_step
  config.source_dir = args.source_dir
  config.source_pcloud = args.source_pcloud
  config.tools_only = args.tools_only
  config.source_git = args.source_git
  config.users = args.users
  config.verbose = args.verbose
  config.wipe = args.wipe
  config.scratch = args.scratch
  config.third_party_prefix = args.third_party_prefix
  if args.timestamp:
    config.timestamp = args.timestamp
  config.performance = args.performance
  config.download_only = args.download_only
  config.artifacts_dir = args.artifacts
  env = builder_env(config, available_packages)
  bldr = builder(env)
  resolved_args = bldr.check_and_resolve_cmd_line_args(target_packages)
  if args.targets:
    # --targets: just list the known package names and exit
    build_blurb.blurb('rebuild', ' '.join(bldr.package_names()), fit=True)
    return 1
  build_blurb.blurb('rebuild', 'target=%s; host=%s' % (config.build_target.build_path, config.host_build_target.build_path))
  build_blurb.blurb_verbose('rebuild', 'command line: %s' % (' '.join(sys.argv)))
  if resolved_args.invalid_args:
    build_blurb.blurb('rebuild', 'Invalid targets: %s' % (' '.join(resolved_args.invalid_args)))
    build_blurb.blurb('rebuild', 'possible targets:')
    build_blurb.blurb('rebuild', ' '.join(bldr.package_names()), fit=True)
    return 1
  if config.recipes_only:
    return bldr.EXIT_CODE_SUCCESS
  return bldr.build_many_scripts(resolved_args.package_names)
def extract_member_to_string(self, member):
  """Extract archive `member` into a temp file and return its content.

  The temp file is always removed, even when extraction or reading fails
  (the previous version leaked it on exceptions).
  """
  tmp_file = temp_file.make_temp_file()
  try:
    self.extract_member_to_file(member, tmp_file)
    return file_util.read(tmp_file)
  finally:
    file_util.remove(tmp_file)
def _content_changed(clazz, filename, content):
  """Return True if `filename` is missing or its content differs from `content`."""
  if path.isfile(filename):
    return file_util.read(filename) != content
  # a missing file always counts as changed
  return True
def load(self, cached_filename):
  """Return the content of the cached file at `cached_filename`."""
  content = file_util.read(cached_filename)
  return content
def test_update_site_dot_py(self):
  """update_site_dot_py() should create site.py with the canonical content."""
  tmp_dir = temp_file.make_temp_dir()
  setup_tools.update_site_dot_py(tmp_dir)
  written = path.join(tmp_dir, setup_tools.SITE_DOT_PY_FILENAME)
  # file_util.read() yields bytes, so compare against the encoded content
  self.assertEqual( setup_tools.SITE_DOT_PY_CONTENT.encode('utf-8'), file_util.read(written) )
def load_file(clazz, filename):
  """Read `filename` as UTF-8 text and parse it."""
  text = file_util.read(filename, codec='utf-8')
  return clazz.parse(text)
def read_patch(clazz, patch):
  'Return the content of a patch. Patch can be compressed.'
  if not clazz.patch_is_compressed(patch):
    return file_util.read(patch, codec='utf-8')
  return compressed_file.read(patch)
def from_file(clazz, filename):
  """Build an instance from the JSON stored in `filename`."""
  parsed = json.loads(file_util.read(filename))
  return clazz.parse_dict(parsed)
def parse_file(clazz, filename):
  """Read `filename`, decode it as UTF-8 and parse the resulting text."""
  decoded = file_util.read(filename).decode('utf-8')
  return clazz.parse_text(filename, decoded)
def _load_recipes_v2(clazz, env, filename):
  """Parse the v2 recipes in `filename` and wrap them in a _recipes record."""
  text = file_util.read(filename, codec='utf8')
  recipes = recipe_parser(filename, text).parse()
  return clazz._recipes(filename, recipes)
def from_file(clazz, filename):
  """Build an instance from the JSON text stored in `filename`."""
  json_text = file_util.read(filename)
  return clazz.from_json(json_text)