def copy_to_tmpdir(tmpdir, source, dest):
    """
    Copy source to dest in tmpdir
    """
    dest_path = os.path.join(tmpdir, dest)
    util.makedirs(os.path.dirname(dest_path))
    shutil.copyfile(source, dest_path)

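# Hypothetical usage sketch for copy_to_tmpdir() above: stage a spec file into
# a scratch directory before archiving it. The destination is relative to
# tmpdir and any missing parent directories are created first; the paths here
# are illustrative only.
copy_to_tmpdir("/tmp/px-pq-example", "SPECS/example.spec", "SPECS/example.spec")
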
def apply_patchqueue(base_repo, pq_repo, pq_dir):
    """
    Apply a patchqueue to a base repository
    """
    # Symlink the patchqueue repository into .git/patches
    link_path = relpath(pq_repo.working_dir, base_repo.git_dir)
    symlink(link_path, join(base_repo.git_dir, "patches"))

    # Symlink the patchqueue directory to match the base_repo
    # branch name as guilt expects
    patchqueue_path = join(base_repo.git_dir, "patches",
                           base_repo.active_branch.name)
    branch_path = dirname(base_repo.active_branch.name)
    util.makedirs(dirname(patchqueue_path))
    try:
        symlink(relpath(pq_dir, branch_path), patchqueue_path)
    except OSError as err:
        if err.errno != errno.EEXIST:
            raise

    # Create empty guilt status for the branch
    status = join(patchqueue_path, 'status')
    open(status, 'w').close()

    # Push patchqueue
    # `guilt push --all` fails with a non-zero error code if the patchqueue
    # is empty; this cannot be distinguished from a patch failing to apply,
    # so skip trying to push if the patchqueue is empty.
    patches = subprocess.check_output(['guilt', 'unapplied'],
                                      cwd=base_repo.working_dir)
    if patches:
        subprocess.check_call(['guilt', 'push', '--all'],
                              cwd=base_repo.working_dir)

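# A minimal usage sketch for apply_patchqueue(), assuming the repository
# arguments are GitPython Repo objects (which is what the .git_dir,
# .working_dir and .active_branch attributes above suggest) and that guilt is
# installed. The URLs and the "master" patchqueue directory are hypothetical.
from git import Repo

base_repo = Repo.clone_from("https://example.com/base.git", "repos/base")
pq_repo = Repo.clone_from("https://example.com/base.pg.git", "repos/base.pg")

# Link the patchqueue repository into repos/base/.git/patches/<branch>/
# and apply the whole queue with guilt.
apply_patchqueue(base_repo, pq_repo, "master")
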
def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    for pinpath in args.pins:
        pin = Link(pinpath)
        if args.jenkins:
            print('echo "Cloning %s"' % pin.url)
            clone_jenkins(pin.url, args.repos, pin.commitish,
                          args.credentials)
        else:
            try:
                print("Cloning %s" % pin.url)
                util.makedirs(args.repos)
                pq_repo = clone(pin.url, args.repos, pin.commitish)
                if args.clone_base and pin.base:
                    print("Cloning %s" % pin.base)
                    base_repo = clone(pin.base, args.repos,
                                      pin.base_commitish)
                    apply_patchqueue(base_repo, pq_repo, pin.patchqueue)
            except git.GitCommandError as gce:
                print(gce.stderr)

def make(inputdir, outputfile):
    """
    Create a new tarball named outputfile and recursively add all files
    in inputdir to it.
    """
    tarmode = "w"
    if outputfile.endswith("gz"):
        tarmode += ":gz"
    if outputfile.endswith("bz2"):
        tarmode += ":bz2"

    def reset(tarinfo):
        """
        Clean file ownership and naming when adding to archive
        """
        tarinfo.uid = 0
        tarinfo.gid = 0
        tarinfo.uname = "root"
        tarinfo.gname = "root"
        tarinfo.name = os.path.relpath(tarinfo.name, inputdir[1:])
        return tarinfo

    util.makedirs(os.path.dirname(outputfile))
    with tarfile.open(outputfile, mode=tarmode) as tar:
        tar.add(inputdir, filter=reset)

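# A usage sketch for make() above, assuming it is the tarball.make called by
# the patchqueue entry point below; the input directory and output path are
# hypothetical. The ".gz" suffix selects tar mode "w:gz", and the reset()
# filter zeroes ownership and rewrites member names relative to the (absolute)
# input directory, so the archive contents do not depend on the build user.
make("/tmp/px-pq-example", "SOURCES/example-patches.tar.gz")
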
def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    util.setup_logging(args)
    link = Link(args.link)

    # Repo and ending tag are specified in the link file
    repo = link.url
    end_tag = link.commitish
    if end_tag is None:
        end_tag = "HEAD"

    # If the repository URL in the link is remote, look for a
    # local clone in repos (without a .git suffix)
    url = urlparse(repo)
    if url.scheme:
        reponame = os.path.basename(url.path).rsplit(".git")[0]
        repo = os.path.join(args.repos, reponame)

    util.makedirs(os.path.dirname(args.tarball))
    with open('{0}.origin'.format(args.tarball), 'w') as origin_file:
        origin_file.write('{0}\n'.format(git.origin_url(repo)))

    if repo.endswith(".pg"):
        with FileUpdate(args.tarball) as outfile:
            git.archive(repo, end_tag, outfile)
        sys.exit(0)

    # Start tag is based on the version specified in the spec file,
    # but the tag name may be slightly different (v1.2.3 rather than 1.2.3).
    # If the link file does not list a spec file, assume that there is one in
    # the usual place.
    basename = os.path.splitext(os.path.basename(args.link))[0]
    spec_path = os.path.join("SPECS", "%s.spec" % basename)
    spec = Spec(spec_path)

    start_tag = link.base_commitish
    if start_tag is None:
        start_tag = spec.version()
        if start_tag not in git.tags(repo):
            start_tag = "v%s" % start_tag

    try:
        tmpdir = tempfile.mkdtemp(prefix="px-pq-")
        assemble_patchqueue(tmpdir, link, repo, start_tag, end_tag)
        assemble_extra_sources(tmpdir, repo, spec, link)

        with FileUpdate(args.tarball) as outfile:
            tarball.make(tmpdir, outfile)
    finally:
        if args.keeptmp:
            print("Working directory retained at %s" % tmpdir)
        else:
            shutil.rmtree(tmpdir)

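# Illustration of the start-tag fallback above: if the spec reports version
# 1.2.3 but the repository tags releases as v1.2.3, the "v" prefix is added.
# The tag list here is hypothetical.
tags = ["v1.2.2", "v1.2.3"]
start_tag = "1.2.3"
if start_tag not in tags:
    start_tag = "v%s" % start_tag
assert start_tag == "v1.2.3"
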
def clone_all(args, pin):
    """
    If [args.jenkins] is set, print the clone string for Jenkins;
    otherwise clone all the clonable sources into [args.repos].
    """
    # The following assumes that the pin file does not use any
    # rpm macro in its fields. We can enable them by using
    # planex.spec.load and the right RPM_DEFINES but it is more
    # error prone and should probably be done only if we see
    # that it is an essential feature.
    gathered = ([
        source for _, source in pin.sources.items()
        if source.get('commitish', False)
    ] + [
        archive for _, archive in pin.archives.items()
        if archive.get('commitish', False)
    ] + [
        pq for _, pq in pin.patchqueue_sources.items()
        if pq.get('commitish', False)
    ])

    # Prevent double-cloning of a repository
    gathered = set((gath['URL'], gath['commitish']) for gath in gathered)

    if gathered:
        print('echo "Clones for %s"' % basename(pin.linkpath))

    # this is suboptimal but the sets are very small
    if any(commitish1 != commitish2
           for (url1, commitish1) in gathered
           for (url2, commitish2) in gathered
           if url1 == url2):
        sys.exit("error: cloning two git repositories with the same "
                 "name but different commitish is not supported.")

    for url, commitish in gathered:
        print('echo "Cloning %s#%s"' % (url, commitish))
        if args.jenkins:
            clone_jenkins(url, args.repos, commitish, args.credentials)
        # clone is assumed for all other flags
        else:
            util.makedirs(args.repos)
            try:
                nodetached = args.patchqueue or args.repatched
                clone(url, args.repos, commitish, nodetached)
            except git.GitCommandError as gce:
                print(gce.stderr)

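# Illustration of the duplicate-commitish check above with hypothetical pins:
# the same repository URL appearing with two different commitishes is rejected,
# because both clones would land in the same checkout directory.
gathered = {("https://example.com/foo.git", "v1.0"),
            ("https://example.com/foo.git", "v2.0")}
conflict = any(commitish1 != commitish2
               for (url1, commitish1) in gathered
               for (url2, commitish2) in gathered
               if url1 == url2)
assert conflict
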
def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    package_name = args.package
    xs_path = os.getcwd()
    spec = load_spec_and_lnk(xs_path, package_name)
    pin = get_pin_content(args, spec)

    if not (args.quiet or args.show or args.unpin):
        print(json.dumps(pin, indent=2, sort_keys=True,
                         separators=(',', ': ')))

    default_output = "PINS/{}.pin".format(package_name)
    output = args.output
    if args.write:
        output = default_output
    if output is not None:
        path = os.path.dirname(output)
        makedirs(path)
        with open(output, "w") as out:
            json.dump(pin, out, indent=2, sort_keys=True)

    if args.show:
        try:
            with open(default_output, 'r') as infile:
                print(infile.read())
        except IOError as err:
            if err.errno not in (errno.ENOENT,):
                raise
            print("Package '{}' is not pinned".format(package_name))

    if args.unpin:
        try:
            os.remove(default_output)
        except OSError as err:
            if err.errno not in (errno.ENOENT,):
                raise

def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    for pinpath in args.pins:
        pin = Link(pinpath)
        if args.jenkins:
            print('echo "Cloning %s"' % pin.url)
            clone_jenkins(pin.url, args.repos, pin.commitish,
                          args.credentials)
        else:
            print("Cloning %s" % pin.url)
            util.makedirs(args.repos)
            pq_repo = clone(pin.url, args.repos, pin.commitish)
            if pin.base is not None:
                print("Cloning %s" % pin.base)
                base_repo = clone(pin.base, args.repos, pin.base_commitish)

                # Symlink the patchqueue repository into .git/patches
                link_path = relpath(pq_repo.working_dir, base_repo.git_dir)
                symlink(link_path, join(base_repo.git_dir, "patches"))

                # Symlink the patchqueue directory to match the base_repo
                # branch name as guilt expects
                patchqueue_path = join(base_repo.git_dir, "patches",
                                       base_repo.active_branch.name)
                branch_path = dirname(base_repo.active_branch.name)
                util.makedirs(dirname(patchqueue_path))
                symlink(relpath(pin.patchqueue, branch_path),
                        patchqueue_path)

                # Create empty guilt status for the branch
                status = join(patchqueue_path, 'status')
                open(status, 'w').close()

                # Push patchqueue
                subprocess.check_call(['guilt', 'push', '--all'],
                                      cwd=base_repo.working_dir)

def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    package_name = args.package
    xs_path = os.getcwd()
    spec = load_spec_and_lnk(xs_path, package_name)
    pin = get_pin_content(args, spec)

    if not args.quiet:
        print(json.dumps(pin, indent=2, sort_keys=True))

    output = args.output
    if args.write:
        output = "PINS/{}.pin".format(package_name)
    if output is not None:
        path = os.path.dirname(output)
        makedirs(path)
        with open(output, "w") as out:
            json.dump(pin, out, indent=2, sort_keys=True)

def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    for pinpath in args.pins:
        pin = Link(pinpath)
        reponame = os.path.basename(pin.url).rsplit(".git")[0]
        checkoutdir = os.path.join(args.repos, reponame)
        if args.jenkins:
            print('echo "Cloning %s"' % pin.url)
            print(CHECKOUT_TEMPLATE.substitute(url=pin.url,
                                               branch=pin.commitish,
                                               checkoutdir=checkoutdir,
                                               credentials=args.credentials))
        else:
            print("Cloning %s" % pin.url)
            util.makedirs(os.path.dirname(checkoutdir))
            clone(pin.url, checkoutdir, pin.commitish)
            if pin.base is not None:
                base_reponame = os.path.basename(pin.base).rsplit(".git")[0]
                base_checkoutdir = os.path.join(args.repos, base_reponame)
                print("Cloning %s" % pin.base)
                util.makedirs(os.path.dirname(base_checkoutdir))
                clone(pin.base, base_checkoutdir, pin.base_commitish)

                # Symlink the patchqueue
                patch_path = os.path.join(base_checkoutdir, ".git/patches")
                link_path = os.path.relpath(checkoutdir, patch_path)
                util.makedirs(patch_path)
                os.symlink(os.path.join(link_path, pin.patchqueue),
                           os.path.join(patch_path, pin.base_commitish))

                # Create empty guilt status for the branch
                status = os.path.join(patch_path, pin.base_commitish,
                                      'status')
                open(status, 'w').close()

                # Push patchqueue
                subprocess.check_call(['guilt', 'push', '--all'],
                                      cwd=base_checkoutdir)