def save_build_dir_logs(clazz, env, filenames):
  'Save any logs that exist to the logs/ dir.'
  for filename in filenames:
    src = path.join(env.build_dir, filename)
    dst = path.join(env.logs_dir, path.basename(src))
    if path.isfile(src):
      file_util.copy(src, dst)
def _make_standalone_python(self, program, script, env):
  src_basename = path.basename(program.filename)
  dst_basename = path.basename(program.dst_filename)
  if dst_basename.endswith('.py'):
    return step_result(False, 'dst program should not end in .py: %s' % (dst_basename))
  src_program = variable.substitute(program.filename, script.substitutions)
  if not path.isabs(src_program):
    src_program = path.join(script.build_dir, src_program)
  if not path.isfile(src_program):
    return step_result(False, 'src program not found: %s' % (src_program))
  # pyinstaller names its output after the source script, so stage a copy with
  # the desired destination basename before building.
  tmp_src_program = path.join(script.build_dir, dst_basename + '.py')
  file_util.copy(src_program, tmp_src_program)
  dst_program = path.join(script.build_dir, 'dist', dst_basename)
  cmd = 'pyinstaller --log-level INFO -F %s' % (tmp_src_program)
  rv = self.call_shell(cmd, script, env)
  if not rv.success:
    return rv
  if not path.isfile(dst_program):
    return step_result(False, 'dst program not found: %s' % (dst_program))
  installed_program = path.join(script.staged_files_dir, program.dst_filename)
  file_util.mkdir(path.dirname(installed_program))
  file_util.copy(dst_program, installed_program)
  os.chmod(installed_program, 0o755)
  return step_result(True, None)
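# Hedged sketch (not part of the original module): the same PyInstaller one-file pattern
# in isolation, assuming `pyinstaller` is on PATH and writes dist/<name> on a non-Windows
# host.  The function name and arguments below are hypothetical placeholders.
import os
import shutil
import subprocess
from os import path

def make_one_file_exe(src_script, dst_name, build_dir):
  # PyInstaller names its output after the source script, so stage a copy with the
  # desired basename plus a .py suffix before building.
  staged = path.join(build_dir, dst_name + '.py')
  shutil.copy(src_script, staged)
  # Run in build_dir so the build/ and dist/ directories land there.
  subprocess.check_call([ 'pyinstaller', '--log-level', 'INFO', '-F', dst_name + '.py' ],
                        cwd = build_dir)
  exe = path.join(build_dir, 'dist', dst_name)
  if not path.isfile(exe):
    raise RuntimeError('expected output not found: %s' % (exe))
  os.chmod(exe, 0o755)
  return exe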
def publish_artifacts(clazz, am):
  artifacts = file_find.find_fnmatch(am.root_dir, [ '*.tar.gz' ], relative = False)
  for artifact in artifacts:
    tmp_artifact = temp_file.make_temp_file()
    file_util.copy(artifact, tmp_artifact)
    file_util.remove(artifact)
    p = package(tmp_artifact)
    am.publish(tmp_artifact, p.metadata.build_target, False)
def create(self, root_dir, base_dir = None, extra_items = None, include = None, exclude = None):
  self._pre_create()
  items = self._find(root_dir, base_dir, extra_items, include, exclude)
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    file_util.copy(item.filename, path.join(tmp_dir, item.arcname))
  cmd = 'hdiutil create -srcfolder %s -ov -format UDZO %s' % (tmp_dir, self.filename)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
def create(self, root_dir, base_dir = None, extra_items = None, include = None, exclude = None):
  self._pre_create()
  items = self._find(root_dir, base_dir, extra_items, include, exclude)
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    file_util.copy(item.filename, path.join(tmp_dir, item.arcname))
  manifest_content = '\n'.join([ item.arcname for item in items ])
  manifest = temp_file.make_temp_file(content = manifest_content)
  cmd = 'tar Jcf %s -C %s -T %s' % (self.filename, tmp_dir, manifest)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
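# Hedged sketch (not from the original class): the same "stage files into a temp dir,
# then tar them via a manifest" pattern, standalone.  It assumes a GNU or BSD `tar` on
# PATH; the function and argument names are hypothetical.
import subprocess
import tempfile

def make_xz_tarball(tarball, staged_dir, arcnames):
  # -C switches into the staged dir and -T reads the member list from the manifest,
  # so only the listed arcnames end up in the archive (J selects xz compression).
  with tempfile.NamedTemporaryFile(mode = 'w', suffix = '.manifest', delete = False) as f:
    f.write('\n'.join(arcnames))
    manifest = f.name
  subprocess.check_call([ 'tar', 'Jcf', tarball, '-C', staged_dir, '-T', manifest ])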
def create(self, root_dir, base_dir = None, extra_items = None, include = None, exclude = None):
  items = self._find(root_dir, base_dir, extra_items, include, exclude)
  # NOTE: ext and mode are computed but not yet used; the command below always
  # writes an xz tarball regardless of the extension of self.filename.
  ext = archive_extension.extension_for_filename(self.filename)
  mode = archive_extension.write_format_for_filename(self.filename)
  tmp_dir = temp_file.make_temp_dir()
  for item in items:
    file_util.copy(item.filename, path.join(tmp_dir, item.arcname))
  manifest_content = '\n'.join([ item.arcname for item in items ])
  manifest = temp_file.make_temp_file(content = manifest_content)
  cmd = 'tar Jcf %s -C %s -T %s' % (self.filename, tmp_dir, manifest)
  execute.execute(cmd)
  file_util.remove(tmp_dir)
def test_patch(self):
  p = self.data_path('src_to_dst.patch')
  src = self.data_path('src.txt')
  dst = self.data_path('dst.txt')
  tmp_dir = temp_file.make_temp_dir()
  tmp_src = path.join(tmp_dir, 'src.txt')
  backup_src = tmp_src + '.orig'
  file_util.copy(src, tmp_src)
  patch.patch(p, cwd=tmp_dir, strip=0, backup=True, posix=True)
  self.assertEqual(file_util.read(dst), file_util.read(tmp_src))
  self.assertTrue(path.exists(backup_src))
  self.assertEqual(file_util.read(src), file_util.read(backup_src))
def publish(self, tarball, build_target, allow_replace, metadata):
  check.check_build_target(build_target)
  if self._read_only:
    raise RuntimeError('artifact_manager is read only.')
  if not metadata:
    metadata = package(tarball).metadata
  check.check_package_metadata(metadata)
  pkg_desc = metadata.package_descriptor
  artifact_path_rel, artifact_path_abs = self._artifact_paths(pkg_desc, build_target)
  file_util.copy(tarball, artifact_path_abs, use_hard_link=True)
  self._reset_requirement_managers()
  pkg_metadata = metadata.clone_with_filename(artifact_path_rel)
  should_replace = allow_replace and self._db.has_artifact(pkg_metadata.artifact_descriptor)
  if should_replace:
    self._db.replace_artifact(pkg_metadata)
  else:
    self._db.add_artifact(pkg_metadata)
  return artifact_path_abs
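# Hedged sketch of the behavior file_util.copy(..., use_hard_link = True) is assumed to
# provide: hard link the tarball into the artifact tree when both paths live on the same
# filesystem, and fall back to a real copy otherwise.  This is an illustration, not the
# library's actual implementation.
import os
import shutil
from os import path

def copy_with_hard_link(src, dst):
  dst_dir = path.dirname(dst)
  if dst_dir and not path.isdir(dst_dir):
    os.makedirs(dst_dir)
  try:
    os.link(src, dst)       # same filesystem: instant and uses no extra space
  except OSError:
    shutil.copy2(src, dst)  # cross-device link or existing dst: plain copy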
def _make_standalone_shell_script(self, program, script, env):
  src_basename = path.basename(program.filename)
  dst_basename = path.basename(program.dst_filename)
  if dst_basename.endswith('.sh'):
    return step_result(False, 'dst program should not end in .sh: %s' % (dst_basename))
  src_program = variable.substitute(program.filename, script.substitutions)
  if not path.isabs(src_program):
    src_program = path.join(script.build_dir, src_program)
  if not path.isfile(src_program):
    return step_result(False, 'src program not found: %s' % (src_program))
  installed_program = path.join(script.staged_files_dir, program.dst_filename)
  file_util.mkdir(path.dirname(installed_program))
  file_util.copy(src_program, installed_program)
  os.chmod(installed_program, 0o755)
  return step_result(True, None)
def main():
  import bes
  vcli = version_cli(bes)
  parser = argparse.ArgumentParser()
  parser.add_argument('files', action = 'store', nargs = '*', help = 'Files or directories to test')
  vcli.version_add_arguments(parser)
  parser.add_argument('--dry-run', '-n', action = 'store_true', default = False,
                      help = 'Only print which files would get tested [ False ]')
  parser.add_argument('--timing', '-t', action = 'store_true', default = False,
                      help = 'Show the amount of time it takes to run tests [ False ]')
  parser.add_argument('--verbose', '-v', action = 'store_true', default = False,
                      help = 'Verbose debug output [ False ]')
  parser.add_argument('--stop', '-s', action = 'store_true', default = False,
                      help = 'Stop right after the first failure. [ False ]')
  parser.add_argument('--randomize', action = 'store_true', default = False,
                      help = 'Randomize the order in which unit tests run. [ False ]')
  parser.add_argument('--python', action = 'append', default = [],
                      help = 'Python executable(s) to use.  Can be given multiple times to run with different python versions [ python ]')
  parser.add_argument('--page', '-p', action = 'store_true', default = False,
                      help = 'Page output with $PAGER [ False ]')
  parser.add_argument('--profile', action = 'store', default = None,
                      help = 'Profile the code with cProfile and store the output in the given argument [ None ]')
  parser.add_argument('--coverage', action = 'store', default = None,
                      help = 'Run coverage on the code and store the output in the given argument [ None ]')
  parser.add_argument('--pager', action = 'store', default = os.environ.get('PAGER', 'more'),
                      help = 'Pager to use when paging [ %s ]' % (os.environ.get('PAGER', 'more')))
  parser.add_argument('--iterations', '-i', action = 'store', default = 1, type = int,
                      help = 'Number of times to run each test [ 1 ]')
  parser.add_argument('--git', '-g', action = 'store_true', default = False,
                      help = 'Use git status to figure out what has changed to test [ False ]')
  parser.add_argument('--pre-commit', action = 'store_true', default = False,
                      help = 'Run pre-commit checks [ False ]')
  parser.add_argument('--print-tests', action = 'store_true', default = False,
                      help = 'Print the list of unit tests [ False ]')
  parser.add_argument('--print-files', action = 'store_true', default = False,
                      help = 'Print the list of unit test files [ False ]')
  parser.add_argument('--egg', action = 'store_true', default = False,
                      help = 'Make an egg of the package and run the tests against that instead of the live files. [ False ]')
  parser.add_argument('--save-egg', action = 'store_true', default = False,
                      help = 'Save the egg in the current directory. [ False ]')
  parser.add_argument('--ignore', action = 'append', default = [],
                      help = 'Patterns of filenames to ignore []')
  parser.add_argument('--root-dir', action = 'store', default = None,
                      help = 'The root directory for all your projects.  By default it is computed from your git structure. [ None ]')
  parser.add_argument('--dont-hack-env', action = 'store_true', default = False,
                      help = 'Do not hack PATH and PYTHONPATH. [ False ]')
  parser.add_argument('--compile-only', '-c', action = 'store_true', default = False,
                      help = 'Just compile the files to verify syntax [ False ]')
  parser.add_argument('--print-deps', action = 'store_true', default = False,
                      help = 'Print python dependencies for test files [ False ]')
  parser.add_argument('--print-configs', action = 'store_true', default = False,
                      help = 'Print testing configs found [ False ]')
  parser.add_argument('--print-root-dir', action = 'store_true', default = False,
                      help = 'Print the root dir [ False ]')
  parser.add_argument('--print-path', action = 'store_true', default = False,
                      help = 'Print sys.path [ False ]')
  parser.add_argument('--file-ignore-file', action = 'append', default = [],
                      help = 'List of file ignore files. [ .bes_test_ignore .bes_test_internal_ignore ]')
  parser.add_argument('--env', action = 'append', default = [],
                      help = 'Environment variables to set [ None ]')
  parser.add_argument('--no-env-deps', action = 'store_true', default = False,
                      help = 'Do not use env deps. [ False ]')
  parser.add_argument('--temp-dir', action = 'store', default = None,
                      help = 'The directory to use for tmp files overriding the system default. [ None ]')
  for g in parser._action_groups:
    g._group_actions.sort(key = lambda x: x.dest)
  args = parser.parse_args()
  if args.temp_dir:
    file_util.mkdir(args.temp_dir)
    tempfile.tempdir = args.temp_dir
  if os.environ.get('DEBUG', False):
    args.verbose = True
  cwd = os.getcwd()
  if args.version:
    vcli.version_print_version()
    return 0
  args.env = _parse_args_env(args.env)
  if not args.files:
    args.files = [ cwd ]
  if not args.file_ignore_file:
    args.file_ignore_file = [ '.bes_test_ignore', '.bes_test_internal_ignore' ]
  ar = argument_resolver(cwd, args.files, root_dir = args.root_dir,
                         file_ignore_filename = args.file_ignore_file,
                         check_git = args.git,
                         use_env_deps = not args.no_env_deps)
  ar.num_iterations = args.iterations
  ar.randomize = args.randomize
  ar.ignore_with_patterns(args.ignore)
  if args.compile_only:
    total_files = len(ar.all_files)
    for i, f in enumerate(ar.all_files):
      tmp = temp_file.make_temp_file()
      filename_count_blurb = ' ' + _make_count_blurb(i + 1, total_files)
      short_filename = file_util.remove_head(f, cwd)
      blurb = '%7s:%s %s ' % ('compile', filename_count_blurb, short_filename)
      printer.writeln_name(blurb)
      py_compile.compile(f, cfile = tmp, doraise = True)
    return 0
  if not ar.test_descriptions:
    return 1
  if args.print_path:
    for p in sys.path:
      print(p)
    return 0
  if args.print_configs:
    ar.print_configs()
    return 0
  if args.print_root_dir:
    print(ar.root_dir)
    return 0
  if args.print_files:
    ar.print_files()
    return 0
  if args.print_tests:
    ar.print_tests()
    return 0
  if (args.print_deps or args.pre_commit) and not ar.supports_test_dependency_files():
    printer.writeln_name('ERROR: Cannot figure out dependencies. snakefood missing.')
    return 1
  if args.print_deps:
    dep_files = ar.test_dependency_files()
    for filename in sorted(dep_files.keys()):
      print(filename)
      for dep_file in dep_files[filename]:
        print(' %s' % (dep_file.filename))
    return 0
  # Start with a clean environment so unit testing can be deterministic and not subject
  # to whatever the user happened to have exported.  PYTHONPATH and PATH for dependencies
  # are set below by iterating the configs.
  keep_keys = [ 'BES_LOG', 'BES_VERBOSE', 'BESCFG_PATH', 'DEBUG', 'BES_TEMP_DIR' ]
  if args.dont_hack_env:
    keep_keys.extend([ 'PATH', 'PYTHONPATH' ])
  env = os_env.make_clean_env(keep_keys = keep_keys)
  env['PYTHONDONTWRITEBYTECODE'] = 'x'
  variables = {
    'rebuild_dir': path.expanduser('~/.rebuild'),
    'system': host.SYSTEM,
  }
  if not args.dont_hack_env:
    ar.update_environment(env, variables)
  # Update env with whatever was given in --env
  env.update(args.env)
  num_passed = 0
  num_failed = 0
  num_executed = 0
  num_tests = len(ar.test_descriptions)
  failed_tests = []
  # Remove current dir from sys.path to avoid side effects
  if cwd in sys.path:
    sys.path.remove(cwd)
  if args.egg:
    pythonpath = env_var(env, 'PYTHONPATH')
    pythonpath.remove(cwd)
    for config in ar.env_dependencies_configs:
      setup_dot_py = path.join(config.root_dir, 'setup.py')
      if not path.isfile(setup_dot_py):
        raise RuntimeError('No setup.py found in %s to make the egg.' % (cwd))
      egg_zip = egg.make(setup_dot_py)
      pythonpath.prepend(egg_zip)
      printer.writeln_name('using tmp egg: %s' % (egg_zip))
      if args.save_egg:
        file_util.copy(egg_zip, path.join(cwd, path.basename(egg_zip)))
  if args.pre_commit:
    missing_from_git = []
    for filename, dep_files in ar.test_dependency_files().items():
      for dep_file in dep_files:
        if dep_file.config and not dep_file.git_tracked:
          missing_from_git.append(dep_file.filename)
    if missing_from_git:
      for f in missing_from_git:
        printer.writeln_name('PRE_COMMIT: missing from git: %s' % (path.relpath(f)))
      return 1
    return 0
  ar.cleanup_python_compiled_files()
  # Do all our work with a temporary working directory to be able to check for side effects
  tmp_cwd = temp_file.make_temp_dir(prefix = 'bes_test_', suffix = '.tmp.dir', delete = False)
  os.chdir(tmp_cwd)
  # Use what the OS thinks the path is (to deal with symlinks and virtual tmpfs things)
  tmp_cwd = os.getcwd()
  if not args.dry_run and args.page:
    printer.OUTPUT = tempfile.NamedTemporaryFile(prefix = 'bes_test', delete = True, mode = 'w')
  total_tests = _count_tests(ar.inspect_map, ar.test_descriptions)
  total_files = len(ar.test_descriptions)
  total_num_tests = 0
  if not args.python:
    args.python = [ 'python' ]
  if args.profile:
    args.profile = path.abspath(args.profile)
    if not _check_program('cprofilev'):
      return 1
  if args.coverage:
    args.coverage = path.abspath(args.coverage)
    coverage_exe = _check_program('coverage')
    if not coverage_exe:
      return 1
    args.python = [ coverage_exe ]
  if args.profile and args.coverage:
    printer.writeln_name('ERROR: --profile and --coverage are mutually exclusive.')
    return 1
  options = test_options(args.dry_run, args.verbose, args.stop, args.timing,
                         args.profile, args.coverage, args.python, args.temp_dir)
  timings = {}
  total_time_start = time.time()
  stopped = False
  for i, test_desc in enumerate(ar.test_descriptions):
    file_info = test_desc.file_info
    filename = file_info.filename
    if not filename in timings:
      timings[filename] = []
    for python_exe in args.python:
      result = _test_execute(python_exe, ar.inspect_map, filename, test_desc.tests,
                             options, i + 1, total_files, cwd, env)
      timings[filename].append(result.elapsed_time)
      total_num_tests += result.num_tests_run
      num_executed += 1
      if result.success:
        num_passed += 1
      else:
        num_failed += 1
        failed_tests.append(( python_exe, filename, result ))
      if args.stop and not result.success:
        stopped = True
    if stopped:
      break
  total_elapsed_time = 1000 * (time.time() - total_time_start)
  if args.dry_run:
    return 0
  num_skipped = num_tests - num_executed
  summary_parts = []
  if total_num_tests == total_tests:
    function_summary = '(%d %s)' % (total_tests, _make_test_string(total_tests))
  else:
    function_summary = '(%d of %d %s)' % (total_num_tests, total_tests, _make_test_string(total_tests))
  if num_failed > 0:
    summary_parts.append('%d of %d fixtures FAILED' % (num_failed, num_tests))
  summary_parts.append('%d of %d passed %s' % (num_passed, num_tests, function_summary))
  if num_skipped > 0:
    summary_parts.append('%d of %d skipped' % (num_skipped, num_tests))
  summary = '; '.join(summary_parts)
  printer.writeln_name('%s' % (summary))
  if failed_tests:
    longest_python_exe = max([ len(path.basename(p)) for p in options.interpreters ])
    for python_exe, filename, result in failed_tests:
      if len(options.interpreters) > 1:
        python_exe_blurb = path.basename(python_exe).rjust(longest_python_exe)
      else:
        python_exe_blurb = ''
      error_status = unit_test_output.error_status(result.output)
      for error in error_status.errors:
        printer.writeln_name('%5s: %s %s :%s.%s' % (error.error_type,
                                                    python_exe_blurb,
                                                    file_util.remove_head(filename, cwd),
                                                    error.fixture,
                                                    error.function))
  if num_failed > 0:
    rv = 1
  else:
    rv = 0
  if args.timing:
    filenames = sorted(timings.keys())
    num_filenames = len(filenames)
    for i, filename in zip(range(0, num_filenames), filenames):
      short_filename = file_util.remove_head(filename, cwd)
      all_timings = timings[filename]
      num_timings = len(all_timings)
      avg_ms = _timing_average(all_timings) * 1000.0
      if num_timings > 1:
        run_blurb = '(average of %d runs)' % (num_timings)
      else:
        run_blurb = ''
      if num_filenames > 1:
        count_blurb = '[%s of %s] ' % (i + 1, num_filenames)
      else:
        count_blurb = ''
      printer.writeln_name('timing: %s%s - %2.2f ms %s' % (count_blurb, short_filename, avg_ms, run_blurb))
  if total_elapsed_time >= 1000.0:
    printer.writeln_name('total time: %2.2f s' % (total_elapsed_time / 1000.0))
  else:
    printer.writeln_name('total time: %2.2f ms' % (total_elapsed_time))
  if args.page:
    subprocess.call([ args.pager, printer.OUTPUT.name ])
  current_cwd = os.getcwd()
  if current_cwd != tmp_cwd:
    printer.writeln_name('SIDE EFFECT: working directory was changed from %s to %s' % (tmp_cwd, current_cwd))
  droppings = file_find.find(current_cwd, relative = False, file_type = file_find.ANY)
  for dropping in droppings:
    printer.writeln_name('SIDE EFFECT: dropping found: %s' % (dropping))
  if not droppings:
    os.chdir('/tmp')
    file_util.remove(tmp_cwd)
  return rv
def main():
  root = os.getcwd()
  make_template_tarball(root, 'template', '1.0.0')
  make_template_tarball(root, 'templatedepends', '1.2.3')
  PACKAGES = [
    ( 'fructose-3.4.5-6', 'template', '1.0.0', {} ),
    ( 'mercury-1.2.8-0', 'template', '1.0.0', {} ),
    ( 'arsenic-1.2.9-0', 'template', '1.0.0', {} ),
    ( 'fiber-1.0.0-0', 'template', '1.0.0', {} ),
    ( 'water-1.0.0-0', 'template', '1.0.0', {} ),
    ( 'fruit-1.0.0', 'templatedepends', '1.2.3', {
      # '#@REB_20@': 'PKG_CHECK_MODULES([CACA], [caca])',
      '/*@fruit1_dot_c@*/': 'file:template/code/fruit/fruit1.c',
      '/*@fruit1_dot_h@*/': 'file:template/code/fruit/fruit1.h',
      '/*@fruit2_dot_c@*/': 'file:template/code/fruit/fruit2.c',
      '/*@fruit2_dot_h@*/': 'file:template/code/fruit/fruit2.h',
    } ),
    ( 'pear-1.2.3-1', 'templatedepends', '1.2.3', {
      '/*@pear1_dot_c@*/': 'file:template/code/pear/pear1.c',
      '/*@pear1_dot_h@*/': 'file:template/code/pear/pear1.h',
      '/*@pear2_dot_c@*/': 'file:template/code/pear/pear2.c',
      '/*@pear2_dot_h@*/': 'file:template/code/pear/pear2.h',
    } ),
    ( 'orange-6.5.4-3', 'templatedepends', '1.2.3', {
      '/*@orange1_dot_c@*/': 'file:template/code/orange/orange1.c',
      '/*@orange1_dot_h@*/': 'file:template/code/orange/orange1.h',
      '/*@orange2_dot_c@*/': 'file:template/code/orange/orange2.c',
      '/*@orange2_dot_h@*/': 'file:template/code/orange/orange2.h',
    } ),
    ( 'apple-1.2.3-1', 'templatedepends', '1.2.3', {
      '/*@smoothie1_dot_c@*/': 'file:template/code/smoothie/smoothie1.c',
      '/*@smoothie1_dot_h@*/': 'file:template/code/smoothie/smoothie1.h',
      '/*@smoothie2_dot_c@*/': 'file:template/code/smoothie/smoothie2.c',
      '/*@smoothie2_dot_h@*/': 'file:template/code/smoothie/smoothie2.h',
    } ),
  ]
  # Unused single-package variant, handy when debugging just one package.
  xPACKAGES = [
    ( 'pear-1.2.3-1', 'templatedepends', '1.2.3', {
      '/*@pear1_dot_c@*/': 'file:template/code/pear/pear1.c',
      '/*@pear1_dot_h@*/': 'file:template/code/pear/pear1.h',
      '/*@pear2_dot_c@*/': 'file:template/code/pear/pear2.c',
      '/*@pear2_dot_h@*/': 'file:template/code/pear/pear2.h',
    } ),
  ]
  # Resolve any 'file:' replacement values into the contents of the referenced files.
  for _, _, _, more_replacements in PACKAGES:
    for key, value in more_replacements.items():
      if value.startswith('file:'):
        filename = value.partition(':')[2]
        more_replacements[key] = file_util.read(filename)
  pc_files_dir = path.join(root, '../pkg_config/dependency_tests')
  for package, template_name, template_version, more_replacements in PACKAGES:
    template_tarball = path.join(root, '%s-%s.tar.gz' % (template_name, template_version))
    tmp_dir = temp_file.make_temp_dir(delete = not DEBUG)
    if DEBUG:
      print('DEBUG1: tmp_dir=%s' % (tmp_dir))
    print('package: %s' % (package))
    desc = unit_test_packages.TEST_PACKAGES[package]
    print('desc: %s' % (str(desc)))
    pi = desc  # previously desc['package_info']
    version_no_revision = '%s-%s' % (pi.name, pi.version)
    archiver.extract(template_tarball, tmp_dir, base_dir = 'foo', strip_common_ancestor = True)
    working_dir = path.join(tmp_dir, 'foo')
    if DEBUG:
      print('working_dir=%s' % (working_dir))
    refactor_files.refactor(template_name, pi.name, [ working_dir ])
    file_paths = [
      path.join(working_dir, 'configure.ac'),
      path.join(working_dir, 'libs/%s1/%s1.c' % (pi.name, pi.name)),
      path.join(working_dir, 'libs/%s1/%s1.h' % (pi.name, pi.name)),
      path.join(working_dir, 'libs/%s2/%s2.c' % (pi.name, pi.name)),
      path.join(working_dir, 'libs/%s2/%s2.h' % (pi.name, pi.name)),
    ]
    default_replacements = make_default_replacements(DEFAULT_REPLACEMENTS, pi.name)
    print('default_replacements: %s' % (default_replacements))
    replacements = {}
    replacements.update(default_replacements)
    replacements.update({ '[%s]' % (template_version): '[%s]' % (pi.version) })
    replacements.update(more_replacements)
    for k, v in sorted(replacements.items()):
      print('REPLACEMENTS: %s: %s' % (k, v))
    for f in file_paths:
      file_replace.replace(f, replacements, backup = False)
    command = [
      'cd %s' % (working_dir),
      'automake -a',
      'autoconf',
      './configure',
      'make dist',
      'cp %s.tar.gz %s' % (version_no_revision, root),
    ]
    env = os_env.make_clean_env(keep_keys = [ 'PATH', 'PKG_CONFIG_PATH' ])
    env['GZIP'] = '-n'
    flat_command = ' && '.join(command)
    execute.execute(flat_command, shell = True, non_blocking = True, env = env)
    pc_files = file_find.find_fnmatch(working_dir, [ '*.pc' ], relative = False)
    for pc_file in pc_files:
      dst_pc_file = path.join(pc_files_dir, path.basename(pc_file))
      file_util.copy(pc_file, dst_pc_file)
def create_package(self, filename, debug=False):
  tmp_dir = temp_file.make_temp_dir(delete=not debug)
  if debug:
    print('tmp_dir: %s' % (tmp_dir))
  stage_dir = path.join(tmp_dir, 'stage')
  files_dir = path.join(stage_dir, 'files')
  env_files_dir = path.join(stage_dir, 'env')
  file_util.mkdir(files_dir)
  file_util.mkdir(env_files_dir)
  temp_file.write_temp_files(files_dir, self.files)
  temp_file.write_temp_files(env_files_dir, self.env_files)
  tmp_compiler_dir = path.join(tmp_dir, 'objects')
  cc = compiler(build_target.make_host_build_target())
  include_path = []
  lib_path = []
  # Build any static C libraries and stage the resulting archives and headers.
  static_c_libs = self.objects.get('static_c_libs', [])
  for static_c_lib in static_c_libs:
    sources, headers = static_c_lib.write_files(tmp_compiler_dir)
    include_dir = path.join(tmp_compiler_dir, static_c_lib.filename, 'include')
    lib_dir = path.join(tmp_compiler_dir, static_c_lib.filename)
    include_path.append(include_dir)
    lib_path.append(lib_dir)
    cflags = ['-I%s' % (include_dir)]
    targets = cc.compile_c([source.path for source in sources], cflags=cflags)
    lib_filename = path.join(tmp_compiler_dir, static_c_lib.filename, path.basename(static_c_lib.filename))
    lib = cc.make_static_lib(lib_filename, [target.object for target in targets])
    file_util.copy(lib, path.join(files_dir, static_c_lib.filename))
    for header in headers:
      file_util.copy(header.path, path.join(files_dir, header.filename))
  # Build any shared C libraries the same way.
  shared_c_libs = self.objects.get('shared_c_libs', [])
  for shared_c_lib in shared_c_libs:
    sources, headers = shared_c_lib.write_files(tmp_compiler_dir)
    include_dir = path.join(tmp_compiler_dir, shared_c_lib.filename, 'include')
    lib_dir = path.join(tmp_compiler_dir, shared_c_lib.filename)
    include_path.append(include_dir)
    lib_path.append(lib_dir)
    cflags = ['-I%s' % (include_dir)]
    targets = cc.compile_c([source.path for source in sources], cflags=cflags)
    lib_filename = path.join(tmp_compiler_dir, shared_c_lib.filename, path.basename(shared_c_lib.filename))
    lib = cc.make_shared_lib(lib_filename, [target.object for target in targets])
    file_util.copy(lib, path.join(files_dir, shared_c_lib.filename))
    for header in headers:
      file_util.copy(header.path, path.join(files_dir, header.filename))
  # Build any C programs, linking against the libraries built above.
  c_programs = self.objects.get('c_programs', [])
  for c_program in c_programs:
    sources, headers = c_program.write_files(tmp_compiler_dir)
    include_dir = path.join(tmp_compiler_dir, c_program.filename, 'include')
    lib_dir = path.join(tmp_compiler_dir, c_program.filename)
    cflags = ['-I%s' % (include_dir)]
    cflags += ['-I%s' % (inc) for inc in include_path]
    ldflags = ['-L%s' % (lib_dir)]
    ldflags += ['-L%s' % (lib) for lib in lib_path]
    ldflags += c_program.ldflags or []
    targets = cc.compile_c([source.path for source in sources], cflags=cflags)
    exe_filename = path.join(tmp_compiler_dir, c_program.filename, path.basename(c_program.filename))
    exe = cc.link_exe(exe_filename, [target.object for target in targets], ldflags=ldflags)
    file_util.copy(exe, path.join(files_dir, c_program.filename))
  pkg_desc = package_descriptor(self.metadata.name, self.metadata.build_version,
                                properties=self.properties, requirements=self.requirements)
  return package.create_package(filename, pkg_desc, self.metadata.build_target, stage_dir)
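# Hedged sketch of the compile -> archive -> link flow that the compiler() helper above is
# assumed to wrap, using the stock POSIX cc/ar tools directly.  The function and file
# names are hypothetical.
import subprocess

def build_static_lib_and_exe(lib_sources, lib_out, main_source, exe_out, include_dir):
  objects = []
  for src in lib_sources:
    obj = src.rsplit('.', 1)[0] + '.o'
    subprocess.check_call([ 'cc', '-I%s' % (include_dir), '-c', src, '-o', obj ])
    objects.append(obj)
  # ar rcs creates (or updates) the static archive and writes an index.
  subprocess.check_call([ 'ar', 'rcs', lib_out ] + objects)
  # Link the program against the freshly built archive.
  subprocess.check_call([ 'cc', '-I%s' % (include_dir), main_source, lib_out, '-o', exe_out ])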
def install(clazz, filename, dest_dir, mode = 0o755):
  file_util.mkdir(dest_dir)
  file_util.copy(filename, dest_dir)
  # chmod the installed copy; use the basename so a full source path still
  # resolves to the file inside dest_dir.
  os.chmod(path.join(dest_dir, path.basename(filename)), mode)
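# Hedged usage sketch (the class name and paths are hypothetical):
#
#   installer.install('/tmp/build/bin/mytool', '/usr/local/bin')
#
# copies mytool into /usr/local/bin and marks the installed copy executable (0o755).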