def main(argv=None): """ Entry point """ args = parse_args_or_exit(argv) for pinpath in args.pins: pin = Link(pinpath) if args.jenkins: print('echo "Cloning %s"' % pin.url) clone_jenkins(pin.url, args.repos, pin.commitish, args.credentials) else: try: print("Cloning %s" % pin.url) util.makedirs(args.repos) pq_repo = clone(pin.url, args.repos, pin.commitish) if args.clone_base and pin.base: print("Cloning %s" % pin.base) base_repo = clone(pin.base, args.repos, pin.base_commitish) apply_patchqueue(base_repo, pq_repo, pin.patchqueue) except git.GitCommandError as gce: print(gce.stderr)
def parse_args_or_exit(argv=None): """ Parse command line options """ parser = argparse.ArgumentParser( description='Pack sources and patchqueues into a source RPM', parents=[ planex.cmd.args.common_base_parser(), planex.cmd.args.rpm_define_parser(), planex.cmd.args.keeptmp_parser() ]) parser.add_argument("spec", metavar="SPEC", help="Spec file") parser.add_argument("sources", metavar="SOURCE/PATCHQUEUE", nargs='*', help="Source and patchqueue files") parser.add_argument("--metadata", dest="metadata", action="store_true", help="Add inline comments in the spec file " "to specify what provided sources, patches " "and patchqueues") argcomplete.autocomplete(parser) parsed_args = parser.parse_args(argv) links = [ arg for arg in argv if arg.endswith(".lnk") or arg.endswith(".pin") ] parsed_args.link = None if links: parsed_args.link = Link(links[0]) return parsed_args
def main(argv=None): """ Entry point """ setup_sigint_handler() args = parse_args_or_exit(argv) allspecs = dedupe(args.specs, dedupe_key) links = { pkgname(path): Link(path) for path in allspecs if path.endswith(".lnk") or path.endswith(".pin") } try: specs = { pkgname(path): load(path, link=links.get(pkgname(path)), defines=args.define) for path in allspecs if path.endswith(".spec") } except SpecNameMismatch as exn: sys.exit("error: %s\n" % exn.message) provides_to_rpm = package_to_rpm_map(specs.values()) if not args.json: print_makefile_rules(args, allspecs, specs, provides_to_rpm) else: print_to_json(specs, provides_to_rpm)
def main(argv=None): """ Entry point """ args = parse_args_or_exit(argv) util.setup_logging(args) link = Link(args.link) # Repo and ending tag are specified in the link file repo = link.url end_tag = link.commitish if end_tag is None: end_tag = "HEAD" # If the repository URL in the link is remote, look for a # local clone in repos (without a .git suffix) url = urlparse(repo) if url.scheme: reponame = os.path.basename(url.path).rsplit(".git")[0] repo = os.path.join(args.repos, reponame) util.makedirs(os.path.dirname(args.tarball)) with open('{0}.origin'.format(args.tarball), 'w') as origin_file: origin_file.write('{0}\n'.format(git.origin_url(repo))) if repo.endswith(".pg"): with FileUpdate(args.tarball) as outfile: git.archive(repo, end_tag, outfile) sys.exit(0) # Start tag is based on the version specified in the spec file, # but the tag name may be slightly different (v1.2.3 rather than 1.2.3) # If the link file does not list a spec file, assume that there is one in # the usual place basename = os.path.splitext(os.path.basename(args.link))[0] spec_path = os.path.join("SPECS", "%s.spec" % basename) spec = Spec(spec_path) start_tag = link.base_commitish if start_tag is None: start_tag = spec.version() if start_tag not in git.tags(repo): start_tag = "v%s" % start_tag try: tmpdir = tempfile.mkdtemp(prefix="px-pq-") assemble_patchqueue(tmpdir, link, repo, start_tag, end_tag) assemble_extra_sources(tmpdir, repo, spec, link) with FileUpdate(args.tarball) as outfile: tarball.make(tmpdir, outfile) finally: if args.keeptmp: print("Working directory retained at %s" % tmpdir) else: shutil.rmtree(tmpdir)
def main(argv=None): """ Entry point """ args = parse_args_or_exit(argv) setup_logging(args) for pin in args.pins: package, spec_path, link_pin_path = definitions_for(pin) link_pin = None if link_pin_path: logging.debug("Reading link/pin file %s", link_pin_path) link_pin = Link(str(link_pin_path)) logging.debug("Reading spec file %s", spec_path) spec = planex.spec.load(str(spec_path), link=link_pin, check_package_name=False) if args.clone: # just clone git resources clone_all_git(args, spec) elif args.jenkins: # generate Jenkins information clone_jenkins(args, spec) else: resources = spec.resources_dict() src_res = resources['Source0'] if src_res.is_repo: # remove trailing '.git' repo_path = Path(args.repos, src_res.basename.rsplit(".git")[0]) else: repo_path = Path(args.repos, get_non_repo_name(src_res.url, package)) if not repo_path.parent.exists(): repo_path.parent.mkdir(parents=True) if "PatchQueue0" in resources: if "Archive0" in resources: # component with patches and patchqueue clone_with_patches(spec_path, repo_path, resources['Source0'], resources['Archive0'], resources['PatchQueue0']) else: # component with patchqueue clone_with_patchq(args.repos, repo_path, resources['Source0'], resources['PatchQueue0']) elif "Archive0" in resources: # component with patches clone_with_patches(spec_path, repo_path, resources['Source0'], resources['Archive0'], None) else: # clone all fetchable resources clone_all_fetchable(args, package, spec)
def main(argv=None): """ Main function. Fetch sources directly or via a link file. """ setup_sigint_handler() args = parse_args_or_exit(argv) setup_logging(args) link = Link(args.link) # Extract the spec file with tarfile.open(args.tarball) as tar: tar_root = archive_root(tar) extract_file(tar, os.path.join(tar_root, link.specfile), args.output)
def main(argv=None): """ Entry point """ args = parse_args_or_exit(argv) for pinpath in args.pins: pin = Link(pinpath) reponame = os.path.basename(pin.url).rsplit(".git")[0] checkoutdir = os.path.join(args.repos, reponame) if args.jenkins: print 'echo "Cloning %s"' % pin.url print CHECKOUT_TEMPLATE.substitute(url=pin.url, branch=pin.commitish, checkoutdir=checkoutdir, credentials=args.credentials) else: print "Cloning %s" % pin.url util.makedirs(os.path.dirname(checkoutdir)) clone(pin.url, checkoutdir, pin.commitish) if pin.base is not None: base_reponame = os.path.basename(pin.base).rsplit(".git")[0] base_checkoutdir = os.path.join(args.repos, base_reponame) print "Cloning %s" % pin.base util.makedirs(os.path.dirname(base_checkoutdir)) clone(pin.base, base_checkoutdir, pin.base_commitish) # Symlink the patchqueue patch_path = os.path.join(base_checkoutdir, ".git/patches") link_path = os.path.relpath(checkoutdir, patch_path) util.makedirs(patch_path) os.symlink(os.path.join(link_path, pin.patchqueue), os.path.join(patch_path, pin.base_commitish)) # Create empty guilt status for the branch status = os.path.join(patch_path, pin.base_commitish, 'status') fileh = open(status, 'w') fileh.close() # Push patchqueue subprocess.check_call(['guilt', 'push', '--all'], cwd=base_checkoutdir)
def fetch_via_link(args):
    """
    Parse link file and download patch tarball.
    """
    link = Link(args.spec_or_link)
    url = urlparse.urlparse(str(link.url))
    try:
        fetch_http(url, args.sources[0], args.retries + 1)

    except pycurl.error as exn:
        # Curl download failed
        sys.exit("%s: Failed to fetch %s: %s" %
                 (sys.argv[0], urlparse.urlunparse(url), exn.args[1]))

    except IOError as exn:
        # IO error saving source file
        sys.exit("%s: %s: %s" %
                 (sys.argv[0], exn.strerror, exn.filename))
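A small usage sketch, assuming an argparse-style namespace with only the attributes the function reads; the paths are illustrative.

import argparse

# Hypothetical arguments: fetch_via_link only reads spec_or_link,
# sources[0] and retries.
args = argparse.Namespace(
    spec_or_link="SPECS/example.lnk",          # link file giving the tarball URL
    sources=["SOURCES/example/patches.tar"],   # local path to save the download
    retries=2,
)
fetch_via_link(args)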
def load_spec_and_lnk(repo_path, package_name):
    """
    Return the Spec object for repo_path/SPECS/package_name, updated by the
    corresponding link file if one is present.  Exits with an error if the
    spec file is not present.
    """
    partial_file_path = "%s/SPECS/%s" % (repo_path, package_name)

    specname = "%s.spec" % partial_file_path
    if not os.path.isfile(specname):
        sys.exit("Spec file for {} not present in {}/SPECS".format(
            package_name, repo_path))

    linkname = "%s.lnk" % partial_file_path
    link = Link(linkname) if os.path.isfile(linkname) else None

    spec = planex.spec.load(specname, link=link, defines=RPM_DEFINES)

    return spec
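For illustration, a caller might use it like this; the repository path and package name are hypothetical, and version() is the same Spec accessor used elsewhere in this code.

# Hypothetical layout: /work/xs-repo/SPECS/example.spec, optionally with
# /work/xs-repo/SPECS/example.lnk alongside it.
spec = load_spec_and_lnk("/work/xs-repo", "example")
print(spec.version())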
def main(argv=None): """ Entry point """ args = parse_args_or_exit(argv) for pinpath in args.pins: pin = Link(pinpath) clone_all(args, pin) if args.patchqueue: assemble_patchqueue(args, pin) if args.repatched: specname, _ = splitext(basename(pinpath)) specpath = "SPECS/{}.spec".format(specname) defines = [("_topdir", "_build"), ("_sourcedir", "%_topdir/SOURCES/%name")] assemble_repatched(args, specpath, defines, pin)
def main(argv=None): """ Entry point """ args = parse_args_or_exit(argv) for pinpath in args.pins: pin = Link(pinpath) if args.jenkins: print 'echo "Cloning %s"' % pin.url clone_jenkins(pin.url, args.repos, pin.commitish, args.credentials) else: print "Cloning %s" % pin.url util.makedirs(args.repos) pq_repo = clone(pin.url, args.repos, pin.commitish) if pin.base is not None: print "Cloning %s" % pin.base base_repo = clone(pin.base, args.repos, pin.base_commitish) # Symlink the patchqueue repository into .git/patches link_path = relpath(pq_repo.working_dir, base_repo.git_dir) symlink(link_path, join(base_repo.git_dir, "patches")) # Symlink the patchqueue directory to match the base_repo # branch name as guilt expects patchqueue_path = join(base_repo.git_dir, "patches", base_repo.active_branch.name) branch_path = dirname(base_repo.active_branch.name) util.makedirs(dirname(patchqueue_path)) symlink(relpath(pin.patchqueue, branch_path), patchqueue_path) # Create empty guilt status for the branch status = join(patchqueue_path, 'status') open(status, 'w').close() # Push patchqueue subprocess.check_call(['guilt', 'push', '--all'], cwd=base_repo.working_dir)
def spec_and_lnk(repo_path, package_name):
    """
    Return the Spec and Link objects for repo_path/SPECS/package_name.
    The Link can be None if no link file is present.  Exits with an error
    if the spec file is not present.
    """
    partial_file_path = "%s/SPECS/%s" % (repo_path, package_name)

    specname = "%s.spec" % partial_file_path
    if not os.path.isfile(specname):
        print("Spec file for %s not present in %s/SPECS"
              % (package_name, repo_path))
        sys.exit(1)

    spec = Spec(specname)

    linkname = "%s.lnk" % partial_file_path
    link = Link(linkname) if os.path.isfile(linkname) else None

    return spec, link
def main(argv=None): """Entry point.""" args = parse_args_or_exit(argv) setup_logging(args) spec = Spec(args.specfile_path) link = None if args.lnkfile_path is not None: link = Link(args.lnkfile_path) pin = None pinfile = "{}/{}.pin".format( args.pinsdir, get_name(args.specfile_path, args.lnkfile_path) ) if os.path.exists(pinfile): pin = pinfile manifest = generate_manifest(spec, link, pin) print(json.dumps(manifest, indent=4))
def fetch_source(args):
    """
    Download requested source using URL from spec file.
    """
    link = None
    if args.link:
        link = Link(args.link)

    spec = planex.spec.load(args.spec, link=link,
                            check_package_name=args.check_package_names,
                            defines=args.define)

    try:
        resource = spec.resource(args.source)
    except KeyError as exn:
        sys.exit("%s: No source corresponding to %s" % (sys.argv[0], exn))

    try:
        fetch_source_dispatch(resource, args.retries)
    except UnsupportedScheme as exn:
        sys.exit("%s: Unsupported url scheme %s" % (sys.argv[0], exn))
def parse_args_or_exit(argv=None): """ Parse command line options """ parser = argparse.ArgumentParser( description='Pack sources and patchqueues into a source RPM') add_common_parser_options(parser) parser.add_argument("spec", metavar="SPEC", help="Spec file") parser.add_argument("sources", metavar="SOURCE/PATCHQUEUE", nargs='*', help="Source and patchqueue files") parser.add_argument( "-D", "--define", default=[], action="append", help="--define='MACRO EXPR' define MACRO with value EXPR") parser.add_argument("--keeptmp", action="store_true", help="keep temporary files") argcomplete.autocomplete(parser) parsed_args = parser.parse_args(argv) links = [ arg for arg in argv if arg.endswith(".lnk") or arg.endswith(".pin") ] parsed_args.link = None if links: parsed_args.link = Link(links[0]) patchqueues = [arg for arg in argv if arg.endswith("patches.tar")] parsed_args.patchqueue = None if patchqueues: parsed_args.patchqueue = patchqueues[0] return parsed_args
def setUp(self):
    rpm_defines = [("dist", ".el6"),
                   ("_topdir", "_build"),
                   ("_sourcedir", "%_topdir/SOURCES/%name")]
    self.spec = planex.spec.load("tests/data/ocaml-cohttp.spec",
                                 link=Link("tests/data/ocaml-cohttp.lnk"),
                                 defines=rpm_defines)
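A follow-on test method might look like the sketch below; it assumes the ocaml-cohttp test data loaded in setUp and the name() accessor used elsewhere in this code.

def test_package_name(self):
    # Sketch: the spec file is tests/data/ocaml-cohttp.spec, so the
    # package name is expected to match the file name.
    self.assertEqual(self.spec.name(), "ocaml-cohttp")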
def main(argv=None): """ Entry point """ # pylint: disable=R0914 setup_sigint_handler() args = parse_args_or_exit(argv) allspecs = dedupe(args.specs, dedupe_key) try: specs = { pkgname(path): Spec(path, defines=args.define) for path in allspecs if path.endswith(".spec") } except SpecNameMismatch as exn: sys.stderr.write("error: %s\n" % exn.message) sys.exit(1) links = { pkgname(path): Link(path) for path in allspecs if path.endswith(".lnk") or path.endswith(".pin") } provides_to_rpm = package_to_rpm_map(specs.values()) print("# -*- makefile -*-") print("# vim:ft=make:") if args.verbose: print("# inputs: %s" % " ".join(allspecs)) for spec in specs.itervalues(): build_srpm_from_spec(spec, links.get(spec.name())) # Manifest dependencies must come after spec dependencies # otherwise manifest.json will be the SRPM's first dependency # and will be passed to rpmbuild in the spec position. create_manifest_deps(spec) if spec.name() in links: srpmpath = spec.source_package_path() patchpath = spec.expand_macro("%_sourcedir/patches.tar") print('%s: %s' % (srpmpath, patchpath)) print('%s: %s' % (srpmpath, links[spec.name()].linkpath)) print('%s: %s' % (patchpath, links[spec.name()].linkpath)) download_rpm_sources(spec) build_rpm_from_srpm(spec) if args.buildrequires: buildrequires_for_rpm(spec, provides_to_rpm) print() # Generate targets to build all srpms and all rpms all_rpms = [] all_srpms = [] for spec in specs.itervalues(): rpm_path = spec.binary_package_paths()[-1] all_rpms.append(rpm_path) all_srpms.append(spec.source_package_path()) print("%s: %s" % (spec.name(), rpm_path)) print("%s.srpm: %s" % (spec.name(), spec.source_package_path())) print() print("RPMS := " + " \\\n\t".join(all_rpms)) print() print("SRPMS := " + " \\\n\t".join(all_srpms))