def main(argv=None): """ Main function. Create a mock config containing yum repositories """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) yum_repos = load_yum_repos(args.enablerepo) # load the reference config reference = os.path.join(args.configdir, args.root + '.cfg') config_opts = load_mock_reference(reference) if args.config_opt: config_opts.update(args.config_opt) if args.environment: if 'environment' not in config_opts: config_opts['environment'] = {} config_opts['environment'].update(args.environment) conf_key = 'dnf.conf' if 'dnf.conf' in config_opts else 'yum.conf' mock_config_fp = StringIO.StringIO(config_opts[conf_key]) mock_repos = ConfigParser.SafeConfigParser() mock_repos.readfp(mock_config_fp) # replace repo sections in the mock config update_mock_repos(mock_repos, yum_repos, args.yum_config_opt) mock_config_fp.truncate(0) mock_repos.write(mock_config_fp) config_opts[conf_key] = mock_config_fp.getvalue() # write new config with open(args.mockconfig, "w") as fileh: write_mock_cfg(fileh, config_opts)
def main(argv=None): """ Entry point """ setup_sigint_handler() args = parse_args_or_exit(argv) allspecs = dedupe(args.specs, dedupe_key) links = { pkgname(path): Link(path) for path in allspecs if path.endswith(".lnk") or path.endswith(".pin") } try: specs = { pkgname(path): load(path, link=links.get(pkgname(path)), defines=args.define) for path in allspecs if path.endswith(".spec") } except SpecNameMismatch as exn: sys.exit("error: %s\n" % exn.message) provides_to_rpm = package_to_rpm_map(specs.values()) if not args.json: print_makefile_rules(args, allspecs, specs, provides_to_rpm) else: print_to_json(specs, provides_to_rpm)
def main(argv=None): """ Entry point """ setup_sigint_handler() args = parse_args_or_exit(argv) allspecs = dedupe(args.specs, dedupe_key) links = {pkgname(path): Link(path) for path in allspecs if path.endswith(".lnk") or path.endswith(".pin")} try: specs = {pkgname(path): load(path, link=links.get(pkgname(path)), defines=args.define) for path in allspecs if path.endswith(".spec")} except SpecNameMismatch as exn: sys.exit("error: %s\n" % exn.message) provides_to_rpm = package_to_rpm_map(specs.values()) if not args.json: print_makefile_rules(args, allspecs, specs, provides_to_rpm) else: print_to_json(specs, provides_to_rpm)
def main(): """ Main entry point """ setup_sigint_handler() logging.basicConfig(format='%(message)s', level=logging.ERROR) create_makefile()
def main(argv=None): """ Main function. Create a mock config containing yum repositories """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) yum_repos = load_yum_repos(args.repo_config_list) # load the reference config reference = os.path.join(args.configdir, args.root + '.cfg') config_opts = load_mock_reference(reference) if args.config_opt: config_opts.update(args.config_opt) if args.environment: if 'environment' not in config_opts: config_opts['environment'] = {} config_opts['environment'].update(args.environment) conf_key = 'dnf.conf' if 'dnf.conf' in config_opts else 'yum.conf' mock_config_fp = StringIO.StringIO(config_opts[conf_key]) mock_repos = ConfigParser.SafeConfigParser() mock_repos.readfp(mock_config_fp) # replace repo sections in the mock config update_mock_repos(mock_repos, yum_repos, args.yum_config_opt) mock_config_fp.truncate(0) mock_repos.write(mock_config_fp) config_opts[conf_key] = mock_config_fp.getvalue() # write new config with open(args.mockconfig, "w") as fileh: write_mock_cfg(fileh, config_opts)
def main(argv): """ Main function """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) args.func(args)
def main(argv=None): """ Main function. Fetch sources directly or via a link file. """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) fetch_source(args)
def main(argv=None): """ Entry point """ setup_sigint_handler() args = parse_args_or_exit(argv) allspecs = dedupe(args.specs, dedupe_key) try: specs = {pkgname(path): Spec(path, defines=args.define) for path in allspecs if path.endswith(".spec")} except SpecNameMismatch as exn: sys.stderr.write("error: %s\n" % exn.message) sys.exit(1) links = {pkgname(path): Link(path) for path in allspecs if path.endswith(".lnk") or path.endswith(".pin")} provides_to_rpm = package_to_rpm_map(specs.values()) print("# -*- makefile -*-") print("# vim:ft=make:") if args.verbose: print("# inputs: %s" % " ".join(allspecs)) for spec in specs.itervalues(): print('# %s' % (spec.name())) build_srpm_from_spec(spec, links.get(spec.name())) # Manifest dependencies must come after spec dependencies # otherwise manifest.json will be the SRPM's first dependency # and will be passed to rpmbuild in the spec position. create_manifest_deps(spec) download_rpm_sources(spec, links.get(spec.name())) build_rpm_from_srpm(spec) if args.buildrequires: buildrequires_for_rpm(spec, provides_to_rpm) print() # Generate targets to build all srpms and all rpms all_rpms = [] all_srpms = [] for spec in specs.itervalues(): rpm_path = spec.binary_package_paths()[-1] all_rpms.append(rpm_path) all_srpms.append(spec.source_package_path()) print("%s: %s" % (spec.name(), rpm_path)) print("%s.srpm: %s" % (spec.name(), spec.source_package_path())) print() print("RPMS := " + " \\\n\t".join(all_rpms)) print() print("SRPMS := " + " \\\n\t".join(all_srpms))
def main(argv): """ Main function. Parse spec file and iterate over its sources, downloading them as appropriate. """ util.setup_sigint_handler() intercepted_args, passthrough_args = parse_args_or_exit(argv) config = os.path.join(intercepted_args.configdir, intercepted_args.root + ".cfg") # Initialize yum before setting up logging, because yum uses # logging with a different default loglevel. This avoids # having yum print lots of irrelevant messages during startup. yum_config = util.load_mock_config(config) yumbase = util.get_yumbase(yum_config) setup_yumbase(yumbase) util.setup_logging(intercepted_args) srpm = load_srpm_from_file(passthrough_args[-1]) with open(config) as cfg: mock_config = cfg.read() pkg_hash = get_srpm_hash(srpm, yumbase, mock_config) cachedirs = [os.path.expanduser(x) for x in intercepted_args.cachedirs.split(':')] # Expand default resultdir as done in mock.backend.Root resultdir = intercepted_args.resultdir or \ yum_config['resultdir'] % yum_config if not os.path.isdir(resultdir): os.makedirs(resultdir) # Rebuild if not available in the cache if not in_cache(cachedirs, pkg_hash): logging.debug("Cache miss - rebuilding") build_output = build_package(intercepted_args.configdir, intercepted_args.root, passthrough_args) try: add_to_cache(cachedirs, pkg_hash, build_output) except OSError: # If we can't cache the result, that's not a fatal error pass for cached_file in os.listdir(build_output): dest = os.path.join(resultdir, cached_file) if os.path.exists(dest): os.unlink(dest) shutil.move(os.path.join(build_output, cached_file), resultdir) else: get_from_cache(cachedirs, pkg_hash, resultdir)
def main(argv): """ Main function. Parse spec file and iterate over its sources, downloading them as appropriate. """ util.setup_sigint_handler() intercepted_args, passthrough_args = parse_args_or_exit(argv) config = os.path.join(intercepted_args.configdir, intercepted_args.root + ".cfg") # Initialize yum before setting up logging, because yum uses # logging with a different default loglevel. This avoids # having yum print lots of irrelevant messages during startup. yum_config = util.load_mock_config(config) yumbase = util.get_yumbase(yum_config) setup_yumbase(yumbase) util.setup_logging(intercepted_args) srpm = load_srpm_from_file(passthrough_args[-1]) pkg_hash = get_srpm_hash(srpm, yumbase) cachedirs = [os.path.expanduser(x) for x in intercepted_args.cachedirs.split(':')] # Expand default resultdir as done in mock.backend.Root resultdir = intercepted_args.resultdir or \ yum_config['resultdir'] % yum_config if not os.path.isdir(resultdir): os.makedirs(resultdir) # Rebuild if not available in the cache if not in_cache(cachedirs, pkg_hash): logging.debug("Cache miss - rebuilding") build_output = build_package(intercepted_args.configdir, intercepted_args.root, passthrough_args) try: add_to_cache(cachedirs, pkg_hash, build_output) except OSError: # If we can't cache the result, that's not a fatal error pass for cached_file in os.listdir(build_output): dest = os.path.join(resultdir, cached_file) if os.path.exists(dest): os.unlink(dest) shutil.move(os.path.join(build_output, cached_file), resultdir) else: get_from_cache(cachedirs, pkg_hash, resultdir)
def main(): """ Main entry point """ setup_sigint_handler() shutil.rmtree(BUILD_DIR) os.mkdir(BUILD_DIR) spec = rpm.ts().parseSpec(sys.argv[1]) clean = True if "-noclean" in sys.argv: clean = False # subdirectory of builddir in which the tarball is unpacked; # set by RPM after processing the spec file # if the source file isn't a tarball this won't be set! build_subdir = rpm.expandMacro("%buildsubdir") prepare_build_dir(spec, build_subdir) if os.path.isdir(os.path.join(BUILD_DIR, build_subdir, "debian")): shutil.rmtree(os.path.join(BUILD_DIR, build_subdir, "debian")) # a package with no original tarball is built as a 'native debian package' native = debianmisc.is_native(spec) if not native: # copy over the source, run the prep rule to unpack it, then # rename it as deb expects this should be based on the rewritten # (or not) source name in the debian package - build the debian # dir first and then rename the tarball as needed rename_source(spec, spec.sourceHeader['name'], spec.sourceHeader['version']) debian_dir_from_spec(spec, os.path.join(BUILD_DIR, build_subdir), sys.argv[1], native) cmd = "cd %s\ndpkg-source -b --auto-commit %s" % (BUILD_DIR, build_subdir) print cmd res = subprocess.call(cmd, shell=True) assert res == 0 for i in glob.glob(os.path.join(BUILD_DIR, "*")): if build_subdir in i: continue shutil.copy2(i, SRPM_DIR) if clean: os.unlink(i) if clean: shutil.rmtree(TMPDIR) else: print "makedeb: dpkg input files in %s" % TMPDIR
def main(argv=None): """ Main function. Fetch sources directly or via a link file. """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) if args.spec_or_link.endswith('.spec'): fetch_source(args) elif args.spec_or_link.endswith('.lnk'): fetch_via_link(args) else: sys.exit("%s: Unsupported file type: %s" % (sys.argv[0], args.spec_or_link))
def main(argv=None): """ Main function. Fetch sources directly or via a link file. """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) link = Link(args.link) # Extract the spec file with tarfile.open(args.tarball) as tar: tar_root = archive_root(tar) extract_file(tar, os.path.join(tar_root, link.specfile), args.output)
def main(argv): """ Main function. Fetch sources directly or via a link file. """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) if args.spec_or_link.endswith('.spec'): fetch_sources(args) elif args.spec_or_link.endswith('.lnk'): fetch_via_link(args) else: sys.exit("%s: Unsupported file type: %s" % (sys.argv[0], args.spec_or_link))
def main(argv=None): """ Main entry point. * If run without arguments, create or update the Makefile in the current directory. * If run with --rules, return the path to the Makefile.rules file. """ setup_sigint_handler() args = parse_args_or_exit(argv) logging.basicConfig(format='%(message)s', level=logging.ERROR) if args.rules: print_rules_path() sys.exit(0) create_makefile()
def main(argv): """ Main function. Fetch sources directly or via a link file. """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) try: with open(args.link) as fileh: link = json.load(fileh) except IOError as exn: # IO error loading JSON file sys.exit("%s: %s: %s" % (sys.argv[0], exn.strerror, exn.filename)) # Extract the spec file with tarfile.open(args.tarball) as tar: tar_root = archive_root(tar) extract_file(tar, os.path.join(tar_root, str(link['specfile'])), args.output) if 'patchqueue' in link: patch_dir = os.path.join(tar_root, str(link['patchqueue'])) expand_patchqueue(args, tar, os.path.join(patch_dir, 'series')) elif 'patches' in link: patch_dir = os.path.join(tar_root, str(link['patches'])) else: sys.exit("%s: %s: Expected one of 'patchqueue' or 'patches'" % (sys.argv[0], args.link)) # Extract sources contained in the tarball spec = planex.spec.Spec(args.output, topdir=args.topdir, check_package_name=args.check_package_names) for path, url in spec.all_sources(): if url.netloc == '': if 'patchqueue' in link: # trim off prefix src_path = os.path.join(patch_dir, url.path[len(spec.name()) + 1:]) else: src_path = os.path.join(patch_dir, url.path) if src_path not in tar.getnames(): src_path = os.path.join(tar_root, url.path) extract_file(tar, src_path, path)
def main(argv): """ Main function. Parse spec file and iterate over its sources, downloading them as appropriate. """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) try: sources = [url_for_source(args.spec, s, args.topdir, args.check_package_names) for s in args.sources] except KeyError as exn: sys.exit("%s: No source corresponding to %s" % (sys.argv[0], exn)) for path, url in sources: check_supported_url(url) if url.scheme in ["http", "https", "file"]: if url.scheme != "file" and args.mirror: if not urlparse.urlparse(args.mirror).scheme: args.mirror = "file://" + args.mirror mpath = os.path.join(args.mirror, os.path.basename(url.path)) url = urlparse.urlparse(mpath) try: fetch_http(url, path, args.retries + 1) except pycurl.error as exn: # Curl download failed sys.exit("%s: Failed to fetch %s: %s" % (sys.argv[0], urlparse.urlunparse(url), exn.args[1])) except IOError as exn: # IO error saving source file sys.exit("%s: %s: %s" % (sys.argv[0], exn.strerror, exn.filename)) elif url.scheme == "" and os.path.dirname(url.path) == "": if not os.path.exists(path): sys.exit("%s: Source not found: %s" % (sys.argv[0], path)) # Source file is pre-populated in the SOURCES directory (part of # the repository - probably a patch or local include). Update # its timestamp to placate make, but don't try to download it. logging.debug("Refreshing timestamp for local source %s", path) os.utime(path, None)
def main(argv): """ Main function. Check out sources defined in a spec file. """ setup_sigint_handler() args = parse_args_or_exit(argv) repos = checkout_remote_source(args.topdir, args.specfile, args.dryrun) if args.linkfile: base_dir = os.path.join(args.topdir, repos[0].dir_name) patch_dir = os.path.join(base_dir, '.git') checkout_patchqueue(patch_dir, args.linkfile, args.dryrun) if not args.dryrun: # Create empty guilt status for the branch status = os.path.join(patch_dir, 'patches', repos[0].branch, 'status') fileh = open(status, 'w') fileh.close() subprocess.check_call(['guilt', 'push', '--all'], cwd=base_dir)
def main(): """ Entry point """ setup_sigint_handler() args = parse_cmdline() specs = {} pkgs_to_ignore = args.ignore for ignore_from in args.ignore_from: try: with open(ignore_from) as ignore_file: for name in ignore_file.readlines(): pkgs_to_ignore.append(name.strip()) except IOError: pass for i in pkgs_to_ignore: print "# Will ignore: %s" % i pins = {} if args.pins_dir: pins_glob = os.path.join(args.pins_dir, "*.spec") pin_paths = glob.glob(pins_glob) if pin_paths and args.packaging == "deb": sys.stderr.write("error: Pinning not supported for debian target") sys.exit(1) for pin_path in pin_paths: spec = pkg.Spec(pin_path, target="rpm", dist=args.dist, check_package_name=args.check_package_names, topdir=args.topdir) pins[os.path.basename(pin_path)] = spec os_type = platform.linux_distribution( full_distribution_name=False)[1].lower() for spec_path in args.specs: try: if args.packaging == "deb": def map_name_fn(name): # pylint: disable=C0111 return mappkgname.map_package(name, os_type) spec = pkg.Spec(spec_path, target="deb", map_name=map_name_fn, check_package_name=args.check_package_names, topdir=args.topdir) else: spec = pkg.Spec(spec_path, target="rpm", dist=args.dist, check_package_name=args.check_package_names, topdir=args.topdir) pkg_name = spec.name() if pkg_name in pkgs_to_ignore: continue spec_name = os.path.basename(spec_path) if spec_name in pins: print "# Pinning '%s' to '%s'" % (pkg_name, pins[spec_name].specpath()) specs[spec_name] = pins[spec_name] else: specs[spec_name] = spec except pkg.SpecNameMismatch as exn: sys.stderr.write("error: %s\n" % exn.message) sys.exit(1) provides_to_rpm = package_to_rpm_map(specs.values()) for spec in specs.itervalues(): build_srpm_from_spec(spec) download_rpm_sources(spec) build_rpm_from_srpm(spec) buildrequires_for_rpm(spec, provides_to_rpm) print "" # Generate targets to build all srpms and all rpms all_rpms = [] all_srpms = [] for spec in specs.itervalues(): rpm_path = spec.binary_package_paths()[-1] all_rpms.append(rpm_path) all_srpms.append(spec.source_package_path()) print "%s: %s" % (spec.name(), rpm_path) print "" print "rpms: " + " \\\n\t".join(all_rpms) print "" print "srpms: " + " \\\n\t".join(all_srpms) print ""
def main(): """ Entry point """ # pylint: disable=R0912, R0914, R0915 setup_sigint_handler() args = parse_cmdline() specs = {} print "# -*- makefile -*-" print "# vim:ft=make:" macros = [tuple(macro.split(' ', 1)) for macro in args.define] if any(len(macro) != 2 for macro in macros): _err = [macro for macro in macros if len(macro) != 2] print "error: malformed macro passed to --define: %r" % _err sys.exit(1) # When using deprecated arguments, we want them at the top of the # macros list if args.topdir is not None: print "# warning: --topdir is deprecated" macros.insert(0, ('_topdir', args.topdir)) if args.dist is not None: print "# warning: --dist is deprecated" macros.insert(1, ('dist', args.dist)) pins = {} if args.pins_dir: pins_glob = os.path.join(args.pins_dir, "*.spec") pin_paths = glob.glob(pins_glob) for pin_path in pin_paths: spec = pkg.Spec(pin_path, check_package_name=args.check_package_names, defines=macros) pins[os.path.basename(pin_path)] = spec for spec_path in args.specs: try: spec = pkg.Spec(spec_path, check_package_name=args.check_package_names, defines=macros) pkg_name = spec.name() spec_name = os.path.basename(spec_path) if spec_name in pins: print "# Pinning '%s' to '%s'" % (pkg_name, pins[spec_name].specpath()) specs[spec_name] = pins[spec_name] else: specs[spec_name] = spec except pkg.SpecNameMismatch as exn: sys.stderr.write("error: %s\n" % exn.message) sys.exit(1) provides_to_rpm = package_to_rpm_map(specs.values()) for spec in specs.itervalues(): build_srpm_from_spec(spec) download_rpm_sources(spec) build_rpm_from_srpm(spec) buildrequires_for_rpm(spec, provides_to_rpm) print "" # Generate targets to build all srpms and all rpms all_rpms = [] all_srpms = [] for spec in specs.itervalues(): rpm_path = spec.binary_package_paths()[-1] all_rpms.append(rpm_path) all_srpms.append(spec.source_package_path()) print "%s: %s" % (spec.name(), rpm_path) print "%s.srpm: %s" % (spec.name(), spec.source_package_path()) print "" print "rpms: " + " \\\n\t".join(all_rpms) print "" print "srpms: " + " \\\n\t".join(all_srpms) print ""
def main(argv): """ Main function. Fetch sources directly or via a link file. """ # pylint: disable=R0914 setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) try: with open(args.link) as fileh: link = json.load(fileh) except IOError as exn: # IO error loading JSON file sys.exit("%s: %s: %s" % (sys.argv[0], exn.strerror, exn.filename)) # Extract the spec file with tarfile.open(args.tarball) as tar: tar_root = archive_root(tar) extract_file(tar, os.path.join(tar_root, str(link['specfile'])), args.output + '.tmp') macros = [tuple(macro.split(' ', 1)) for macro in args.define] if any(len(macro) != 2 for macro in macros): _err = [macro for macro in macros if len(macro) != 2] print "error: malformed macro passed to --define: %r" % _err sys.exit(1) # When using deprecated arguments, we want them at the top of the # macros list if args.topdir is not None: print "# warning: --topdir is deprecated" macros.insert(0, ('_topdir', args.topdir)) with open(args.output, "w") as spec_fh: check_names = args.check_package_names spec = planex.spec.Spec(args.output + '.tmp', check_package_name=check_names, defines=macros) write_manifest(spec_fh, spec, link) if 'branch' in link: spec_fh.write("%%define branch %s\n" % link['branch']) if 'patchqueue' in link: patch_dir = os.path.join(tar_root, str(link['patchqueue'])) expand_patchqueue(spec_fh, spec, args.output + '.tmp', tar, os.path.join(patch_dir, 'series')) elif 'patches' in link: patch_dir = os.path.join(tar_root, str(link['patches'])) copy_spec(args.output + '.tmp', spec_fh) else: sys.exit("%s: %s: Expected one of 'patchqueue' or 'patches'" % (sys.argv[0], args.link)) # Extract sources contained in the tarball spec = planex.spec.Spec(args.output, check_package_name=args.check_package_names, defines=macros) for path, url in spec.all_sources(): if url.netloc == '': if 'patchqueue' in link: # trim off prefix src_path = os.path.join(patch_dir, url.path[len(spec.name()) + 1:]) else: src_path = os.path.join(patch_dir, url.path) if src_path not in tar.getnames(): src_path = os.path.join(tar_root, url.path) extract_file(tar, src_path, path)
def main(): """ Entry point """ # pylint: disable=R0912, R0914 setup_sigint_handler() args = parse_cmdline() specs = {} print "# -*- makefile -*-" print "# vim:ft=make:" pkgs_to_ignore = args.ignore for ignore_from in args.ignore_from: try: with open(ignore_from) as ignore_file: for name in ignore_file.readlines(): pkgs_to_ignore.append(name.strip()) except IOError: pass for i in pkgs_to_ignore: print "# Will ignore: %s" % i pins = {} if args.pins_dir: pins_glob = os.path.join(args.pins_dir, "*.spec") pin_paths = glob.glob(pins_glob) for pin_path in pin_paths: spec = pkg.Spec(pin_path, dist=args.dist, check_package_name=args.check_package_names, topdir=args.topdir) pins[os.path.basename(pin_path)] = spec for spec_path in args.specs: try: spec = pkg.Spec(spec_path, dist=args.dist, check_package_name=args.check_package_names, topdir=args.topdir) pkg_name = spec.name() if pkg_name in pkgs_to_ignore: continue spec_name = os.path.basename(spec_path) if spec_name in pins: print "# Pinning '%s' to '%s'" % (pkg_name, pins[spec_name].specpath()) specs[spec_name] = pins[spec_name] else: specs[spec_name] = spec except pkg.SpecNameMismatch as exn: sys.stderr.write("error: %s\n" % exn.message) sys.exit(1) provides_to_rpm = package_to_rpm_map(specs.values()) for spec in specs.itervalues(): build_srpm_from_spec(spec) download_rpm_sources(spec) build_rpm_from_srpm(spec) buildrequires_for_rpm(spec, provides_to_rpm) print "" # Generate targets to build all srpms and all rpms all_rpms = [] all_srpms = [] for spec in specs.itervalues(): rpm_path = spec.binary_package_paths()[-1] all_rpms.append(rpm_path) all_srpms.append(spec.source_package_path()) print "%s: %s" % (spec.name(), rpm_path) print "" print "rpms: " + " \\\n\t".join(all_rpms) print "" print "srpms: " + " \\\n\t".join(all_srpms) print ""
def main(argv=None): """ Entry point """ # pylint: disable=R0914 setup_sigint_handler() args = parse_args_or_exit(argv) allspecs = dedupe(args.specs, dedupe_key) try: specs = { pkgname(path): Spec(path, defines=args.define) for path in allspecs if path.endswith(".spec") } except SpecNameMismatch as exn: sys.stderr.write("error: %s\n" % exn.message) sys.exit(1) links = { pkgname(path): Link(path) for path in allspecs if path.endswith(".lnk") or path.endswith(".pin") } provides_to_rpm = package_to_rpm_map(specs.values()) print("# -*- makefile -*-") print("# vim:ft=make:") if args.verbose: print("# inputs: %s" % " ".join(allspecs)) for spec in specs.itervalues(): build_srpm_from_spec(spec, links.get(spec.name())) # Manifest dependencies must come after spec dependencies # otherwise manifest.json will be the SRPM's first dependency # and will be passed to rpmbuild in the spec position. create_manifest_deps(spec) if spec.name() in links: srpmpath = spec.source_package_path() patchpath = spec.expand_macro("%_sourcedir/patches.tar") print('%s: %s' % (srpmpath, patchpath)) print('%s: %s' % (srpmpath, links[spec.name()].linkpath)) print('%s: %s' % (patchpath, links[spec.name()].linkpath)) download_rpm_sources(spec) build_rpm_from_srpm(spec) if args.buildrequires: buildrequires_for_rpm(spec, provides_to_rpm) print() # Generate targets to build all srpms and all rpms all_rpms = [] all_srpms = [] for spec in specs.itervalues(): rpm_path = spec.binary_package_paths()[-1] all_rpms.append(rpm_path) all_srpms.append(spec.source_package_path()) print("%s: %s" % (spec.name(), rpm_path)) print("%s.srpm: %s" % (spec.name(), spec.source_package_path())) print() print("RPMS := " + " \\\n\t".join(all_rpms)) print() print("SRPMS := " + " \\\n\t".join(all_srpms))
def main(argv=None): """ Entry point """ # pylint: disable=R0914, R0915 setup_sigint_handler() args = parse_args_or_exit(argv) specs = {} print "# -*- makefile -*-" print "# vim:ft=make:" macros = [tuple(macro.split(' ', 1)) for macro in args.define] if any(len(macro) != 2 for macro in macros): _err = [macro for macro in macros if len(macro) != 2] print "error: malformed macro passed to --define: %r" % _err sys.exit(1) pins = [] if args.pins_dir: pins_glob = os.path.join(args.pins_dir, "*.pin") pins = [pkgname(pin) for pin in glob.glob(pins_glob)] links = {pkgname(lnk): lnk for lnk in args.specs if lnk.endswith(".lnk")} for spec_path in [spec for spec in args.specs if spec.endswith(".spec")]: try: spec = pkg.Spec(spec_path, check_package_name=args.check_package_names, defines=macros) spec_name = os.path.basename(spec_path) specs[spec_name] = spec except pkg.SpecNameMismatch as exn: sys.stderr.write("error: %s\n" % exn.message) sys.exit(1) provides_to_rpm = package_to_rpm_map(specs.values()) for spec in specs.itervalues(): create_manifest_deps(spec) build_srpm_from_spec(spec, (spec.name() in links)) if spec.name() in links or spec.name() in pins: srpmpath = spec.source_package_path() patchpath = spec.expand_macro("%_sourcedir/patches.tar") print '%s: %s' % (srpmpath, patchpath) if spec.name() in pins: pinpath = "%s/%s.pin" % (args.pins_dir, spec.name()) print '%s: %s' % (srpmpath, pinpath) if spec.name() in links: linkpath = "SPECS/%s.lnk" % spec.name() print '%s: %s' % (srpmpath, linkpath) download_rpm_sources(spec) build_rpm_from_srpm(spec) buildrequires_for_rpm(spec, provides_to_rpm) print "" # Generate targets to build all srpms and all rpms all_rpms = [] all_srpms = [] for spec in specs.itervalues(): rpm_path = spec.binary_package_paths()[-1] all_rpms.append(rpm_path) all_srpms.append(spec.source_package_path()) print "%s: %s" % (spec.name(), rpm_path) print "%s.srpm: %s" % (spec.name(), spec.source_package_path()) print "" print "RPMS := " + " \\\n\t".join(all_rpms) print "" print "SRPMS := " + " \\\n\t".join(all_srpms)