Code example #1
File: createmockconfig.py Project: euanh/planex
def main(argv=None):
    """
    Main function.  Create a mock config containing yum repositories
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    yum_repos = load_yum_repos(args.repo_config_list)

    # load the reference config
    reference = os.path.join(args.configdir, args.root + '.cfg')
    config_opts = load_mock_reference(reference)
    if args.config_opt:
        config_opts.update(args.config_opt)
    if args.environment:
        if 'environment' not in config_opts:
            config_opts['environment'] = {}
        config_opts['environment'].update(args.environment)
    conf_key = 'dnf.conf' if 'dnf.conf' in config_opts else 'yum.conf'
    mock_config_fp = StringIO.StringIO(config_opts[conf_key])
    mock_repos = ConfigParser.SafeConfigParser()
    mock_repos.readfp(mock_config_fp)

    # replace repo sections in the mock config
    update_mock_repos(mock_repos, yum_repos, args.yum_config_opt)
    mock_config_fp.truncate(0)
    mock_repos.write(mock_config_fp)
    config_opts[conf_key] = mock_config_fp.getvalue()

    # write new config
    with open(args.mockconfig, "w") as fileh:
        write_mock_cfg(fileh, config_opts)
Code example #2
File: createmockconfig.py Project: makunterry/planex
def main(argv=None):
    """
    Main function.  Create a mock config containing yum repositories
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    yum_repos = load_yum_repos(args.enablerepo)

    # load the reference config
    reference = os.path.join(args.configdir, args.root + '.cfg')
    config_opts = load_mock_reference(reference)
    if args.config_opt:
        config_opts.update(args.config_opt)
    if args.environment:
        if 'environment' not in config_opts:
            config_opts['environment'] = {}
        config_opts['environment'].update(args.environment)
    conf_key = 'dnf.conf' if 'dnf.conf' in config_opts else 'yum.conf'
    mock_config_fp = StringIO.StringIO(config_opts[conf_key])
    mock_repos = ConfigParser.SafeConfigParser()
    mock_repos.readfp(mock_config_fp)

    # replace repo sections in the mock config
    update_mock_repos(mock_repos, yum_repos, args.yum_config_opt)
    mock_config_fp.truncate(0)
    mock_repos.write(mock_config_fp)
    config_opts[conf_key] = mock_config_fp.getvalue()

    # write new config
    with open(args.mockconfig, "w") as fileh:
        write_mock_cfg(fileh, config_opts)
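
Examples #1 and #2 rewrite the repository sections of the mock chroot's yum.conf or dnf.conf in memory before writing the final config, using Python 2's StringIO and ConfigParser modules. Below is a minimal, self-contained sketch of the same round-trip on Python 3; the repository data and the plain section rewrite are illustrative simplifications, not planex's actual update_mock_repos helper.

import configparser
import io


def replace_repo_sections(conf_text, yum_repos):
    """Return conf_text with its repository sections replaced by yum_repos.

    yum_repos maps section names to dicts of option/value strings; this is
    a simplified stand-in for planex's update_mock_repos helper.
    """
    parser = configparser.ConfigParser()
    parser.read_string(conf_text)       # Python 3 equivalent of readfp()

    # Drop every existing repository section (everything except [main])...
    for section in parser.sections():
        if section != "main":
            parser.remove_section(section)

    # ...then add the sections loaded from the repository config files.
    for name, options in yum_repos.items():
        parser[name] = options

    out = io.StringIO()
    parser.write(out)
    return out.getvalue()


# Illustrative input: a trimmed-down mock yum.conf and one repository.
CONF = "[main]\nkeepcache=1\n\n[old-repo]\nbaseurl=http://example.com/old\n"
REPOS = {"base": {"name": "Base packages",
                  "baseurl": "http://example.com/base",
                  "gpgcheck": "0"}}
print(replace_repo_sections(CONF, REPOS))

Building a fresh StringIO for the output, as here, sidesteps the truncate(0)-then-write dance the originals perform on the Python 2 buffer.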
Code example #3
File: pin.py Project: salvocambria/planex
def main(argv):
    """
    Main function
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)
    args.func(args)
Code example #4
File: pin.py Project: salvocambria/planex
def main(argv):
    """
    Main function
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)
    args.func(args)
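
Examples #3 and #4 delegate everything to args.func(args), which only works if parse_args_or_exit attaches a handler to each subcommand. A minimal sketch of that argparse dispatch pattern follows; the subcommand names and handlers are made up for illustration and are not pin.py's real interface.

import argparse


def do_add(args):
    print("pinning %s" % args.package)


def do_remove(args):
    print("unpinning %s" % args.package)


def parse_args_or_exit(argv=None):
    """Build a parser whose subcommands carry their handler in 'func'."""
    parser = argparse.ArgumentParser(description="Manage package pins")
    subparsers = parser.add_subparsers(dest="command")
    subparsers.required = True

    add_parser = subparsers.add_parser("add", help="Pin a package")
    add_parser.add_argument("package")
    add_parser.set_defaults(func=do_add)        # main() calls this handler

    remove_parser = subparsers.add_parser("remove", help="Unpin a package")
    remove_parser.add_argument("package")
    remove_parser.set_defaults(func=do_remove)

    return parser.parse_args(argv)


if __name__ == "__main__":
    args = parse_args_or_exit()
    args.func(args)     # the same dispatch used in examples #3 and #4

set_defaults(func=...) is what makes args.func available; without it the dispatch line would raise AttributeError.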
Code example #5
File: fetch.py Project: xenserver/planex
def main(argv=None):
    """
    Main function.  Fetch sources directly or via a link file.
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    fetch_source(args)
Code example #6
File: fetch.py Project: psafont/planex
def main(argv=None):
    """
    Main function.  Fetch sources directly or via a link file.
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    fetch_source(args)
Code example #7
def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    util.setup_logging(args)
    link = Link(args.link)

    # Repo and ending tag are specified in the link file
    repo = link.url
    end_tag = link.commitish
    if end_tag is None:
        end_tag = "HEAD"

    # If the repository URL in the link is remote, look for a
    # local clone in repos (without a .git suffix)
    url = urlparse(repo)
    if url.scheme:
        reponame = os.path.basename(url.path).rsplit(".git")[0]
        repo = os.path.join(args.repos, reponame)

    util.makedirs(os.path.dirname(args.tarball))
    with open('{0}.origin'.format(args.tarball), 'w') as origin_file:
        origin_file.write('{0}\n'.format(git.origin_url(repo)))

    if repo.endswith(".pg"):
        with FileUpdate(args.tarball) as outfile:
            git.archive(repo, end_tag, outfile)
        sys.exit(0)

    # Start tag is based on the version specified in the spec file,
    # but the tag name may be slightly different (v1.2.3 rather than 1.2.3)
    # If the link file does not list a spec file, assume that there is one in
    # the usual place
    basename = os.path.splitext(os.path.basename(args.link))[0]
    spec_path = os.path.join("SPECS", "%s.spec" % basename)
    spec = Spec(spec_path)

    start_tag = link.base_commitish
    if start_tag is None:
        start_tag = spec.version()
        if start_tag not in git.tags(repo):
            start_tag = "v%s" % start_tag

    try:
        tmpdir = tempfile.mkdtemp(prefix="px-pq-")
        assemble_patchqueue(tmpdir, link, repo, start_tag, end_tag)
        assemble_extra_sources(tmpdir, repo, spec, link)
        with FileUpdate(args.tarball) as outfile:
            tarball.make(tmpdir, outfile)

    finally:
        if args.keeptmp:
            print("Working directory retained at %s" % tmpdir)
        else:
            shutil.rmtree(tmpdir)
Code example #8
File: clone.py Project: TimSmithCtx/planex
def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    setup_logging(args)

    for pin in args.pins:
        package, spec_path, link_pin_path = definitions_for(pin)

        link_pin = None
        if link_pin_path:
            logging.debug("Reading link/pin file %s", link_pin_path)
            link_pin = Link(str(link_pin_path))
        logging.debug("Reading spec file %s", spec_path)
        spec = planex.spec.load(str(spec_path), link=link_pin,
                                check_package_name=False)

        if args.clone:
            # just clone git resources
            clone_all_git(args, spec)
        elif args.jenkins:
            # generate Jenkins information
            clone_jenkins(args, spec)
        else:
            resources = spec.resources_dict()
            src_res = resources['Source0']
            if src_res.is_repo:
                # remove trailing '.git'
                repo_path = Path(args.repos,
                                 src_res.basename.rsplit(".git")[0])
            else:
                repo_path = Path(args.repos,
                                 get_non_repo_name(src_res.url, package))
            if not repo_path.parent.exists():
                repo_path.parent.mkdir(parents=True)

            if "PatchQueue0" in resources:
                if "Archive0" in resources:
                    # component with patches and patchqueue
                    clone_with_patches(spec_path, repo_path,
                                       resources['Source0'],
                                       resources['Archive0'],
                                       resources['PatchQueue0'])
                else:
                    # component with patchqueue
                    clone_with_patchq(args.repos, repo_path,
                                      resources['Source0'],
                                      resources['PatchQueue0'])
            elif "Archive0" in resources:
                # component with patches
                clone_with_patches(spec_path, repo_path, resources['Source0'],
                                   resources['Archive0'], None)
            else:
                # clone all fetchable resources
                clone_all_fetchable(args, package, spec)
Code example #9
File: clone.py Project: xenserver/planex
def main(argv=None):
    """
    Entry point
    """
    args = parse_args_or_exit(argv)
    setup_logging(args)

    for pin in args.pins:
        package, spec_path, link_pin_path = definitions_for(pin)

        link_pin = None
        if link_pin_path:
            logging.debug("Reading link/pin file %s", link_pin_path)
            link_pin = Link(str(link_pin_path))
        logging.debug("Reading spec file %s", spec_path)
        spec = planex.spec.load(str(spec_path), link=link_pin,
                                check_package_name=False)

        if args.clone:
            # just clone git resources
            clone_all_git(args, spec)
        elif args.jenkins:
            # generate Jenkins information
            clone_jenkins(args, spec)
        else:
            resources = spec.resources_dict()
            src_res = resources['Source0']
            if src_res.is_repo:
                # remove trailing '.git'
                repo_path = Path(args.repos,
                                 src_res.basename.rsplit(".git")[0])
            else:
                repo_path = Path(args.repos,
                                 get_non_repo_name(src_res.url, package))
            if not repo_path.parent.exists():
                repo_path.parent.mkdir(parents=True)

            if "PatchQueue0" in resources:
                if "Archive0" in resources:
                    # component with patches and patchqueue
                    clone_with_patches(spec_path, repo_path,
                                       resources['Source0'],
                                       resources['Archive0'],
                                       resources['PatchQueue0'])
                else:
                    # component with patchqueue
                    clone_with_patchq(args.repos, repo_path,
                                      resources['Source0'],
                                      resources['PatchQueue0'])
            elif "Archive0" in resources:
                # component with patches
                clone_with_patches(spec_path, repo_path, resources['Source0'],
                                   resources['Archive0'], None)
            else:
                # clone all fetchable resources
                clone_all_fetchable(args, package, spec)
Code example #10
File: cache.py Project: johnelse/planex
def main(argv):
    """
    Main function.  Parse spec file and iterate over its sources, downloading
    them as appropriate.
    """
    util.setup_sigint_handler()
    intercepted_args, passthrough_args = parse_args_or_exit(argv)
    config = os.path.join(intercepted_args.configdir,
                          intercepted_args.root + ".cfg")

    # Initialize yum before setting up logging, because yum uses
    # logging with a different default loglevel.   This avoids
    # having yum print lots of irrelevant messages during startup.
    yum_config = util.load_mock_config(config)
    yumbase = util.get_yumbase(yum_config)
    setup_yumbase(yumbase)

    util.setup_logging(intercepted_args)

    srpm = load_srpm_from_file(passthrough_args[-1])
    with open(config) as cfg:
        mock_config = cfg.read()
    pkg_hash = get_srpm_hash(srpm, yumbase, mock_config)

    cachedirs = [os.path.expanduser(x) for x
                 in intercepted_args.cachedirs.split(':')]

    # Expand default resultdir as done in mock.backend.Root
    resultdir = intercepted_args.resultdir or \
        yum_config['resultdir'] % yum_config

    if not os.path.isdir(resultdir):
        os.makedirs(resultdir)

    # Rebuild if not available in the cache
    if not in_cache(cachedirs, pkg_hash):
        logging.debug("Cache miss - rebuilding")
        build_output = build_package(intercepted_args.configdir,
                                     intercepted_args.root, passthrough_args)
        try:
            add_to_cache(cachedirs, pkg_hash, build_output)
        except OSError:
            # If we can't cache the result, that's not a fatal error
            pass

        for cached_file in os.listdir(build_output):
            dest = os.path.join(resultdir, cached_file)

            if os.path.exists(dest):
                os.unlink(dest)
            shutil.move(os.path.join(build_output, cached_file), resultdir)
    else:
        get_from_cache(cachedirs, pkg_hash, resultdir)
Code example #11
File: cache.py Project: thomasmck/planex
def main(argv):
    """
    Main function.  Parse spec file and iterate over its sources, downloading
    them as appropriate.
    """
    util.setup_sigint_handler()
    intercepted_args, passthrough_args = parse_args_or_exit(argv)
    config = os.path.join(intercepted_args.configdir,
                          intercepted_args.root + ".cfg")

    # Initialize yum before setting up logging, because yum uses
    # logging with a different default loglevel.   This avoids
    # having yum print lots of irrelevant messages during startup.
    yum_config = util.load_mock_config(config)
    yumbase = util.get_yumbase(yum_config)
    setup_yumbase(yumbase)

    util.setup_logging(intercepted_args)

    srpm = load_srpm_from_file(passthrough_args[-1])
    pkg_hash = get_srpm_hash(srpm, yumbase)

    cachedirs = [os.path.expanduser(x) for x
                 in intercepted_args.cachedirs.split(':')]

    # Expand default resultdir as done in mock.backend.Root
    resultdir = intercepted_args.resultdir or \
        yum_config['resultdir'] % yum_config

    if not os.path.isdir(resultdir):
        os.makedirs(resultdir)

    # Rebuild if not available in the cache
    if not in_cache(cachedirs, pkg_hash):
        logging.debug("Cache miss - rebuilding")
        build_output = build_package(intercepted_args.configdir,
                                     intercepted_args.root, passthrough_args)
        try:
            add_to_cache(cachedirs, pkg_hash, build_output)
        except OSError:
            # If we can't cache the result, that's not a fatal error
            pass

        for cached_file in os.listdir(build_output):
            dest = os.path.join(resultdir, cached_file)

            if os.path.exists(dest):
                os.unlink(dest)
            shutil.move(os.path.join(build_output, cached_file), resultdir)
    else:
        get_from_cache(cachedirs, pkg_hash, resultdir)
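
Examples #10 and #11 lean on three helpers, in_cache, add_to_cache and get_from_cache, which use the package hash as a key into one or more cache directories. The following is a hypothetical sketch of how such a layout could work; it illustrates the pattern only and is not planex's actual implementation.

import os
import shutil


def in_cache(cachedirs, pkg_hash):
    """Return True if any cache directory already holds this hash."""
    return any(os.path.isdir(os.path.join(d, pkg_hash)) for d in cachedirs)


def add_to_cache(cachedirs, pkg_hash, build_output):
    """Copy the build output into the first cache directory."""
    dest = os.path.join(cachedirs[0], pkg_hash)
    if not os.path.isdir(dest):
        shutil.copytree(build_output, dest)


def get_from_cache(cachedirs, pkg_hash, resultdir):
    """Copy a cached build into resultdir, overwriting stale files.

    Assumes the cached output is a flat directory of files.
    """
    for cachedir in cachedirs:
        cached = os.path.join(cachedir, pkg_hash)
        if os.path.isdir(cached):
            for name in os.listdir(cached):
                target = os.path.join(resultdir, name)
                if os.path.exists(target):
                    os.unlink(target)
                shutil.copy(os.path.join(cached, name), resultdir)
            return
    raise KeyError(pkg_hash)

Read alongside the examples, the try/except OSError around add_to_cache also makes sense: a full or read-only cache simply degrades to an uncached rebuild.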
Code example #12
def main(argv=None):
    """
    Main function.  Fetch sources directly or via a link file.
    """

    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    link = Link(args.link)

    # Extract the spec file
    with tarfile.open(args.tarball) as tar:
        tar_root = archive_root(tar)
        extract_file(tar, os.path.join(tar_root, link.specfile), args.output)
Code example #13
File: fetch.py Project: euanh/planex
def main(argv=None):
    """
    Main function.  Fetch sources directly or via a link file.
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    if args.spec_or_link.endswith('.spec'):
        fetch_source(args)
    elif args.spec_or_link.endswith('.lnk'):
        fetch_via_link(args)
    else:
        sys.exit("%s: Unsupported file type: %s" % (sys.argv[0],
                                                    args.spec_or_link))
Code example #14
File: fetch.py Project: liulinC/planex
def main(argv):
    """
    Main function.  Fetch sources directly or via a link file.
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    if args.spec_or_link.endswith('.spec'):
        fetch_sources(args)
    elif args.spec_or_link.endswith('.lnk'):
        fetch_via_link(args)
    else:
        sys.exit("%s: Unsupported file type: %s" %
                 (sys.argv[0], args.spec_or_link))
Code example #15
File: extract.py Project: salvocambria/planex
def main(argv):
    """
    Main function.  Fetch sources directly or via a link file.
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    try:
        with open(args.link) as fileh:
            link = json.load(fileh)

    except IOError as exn:
        # IO error loading JSON file
        sys.exit("%s: %s: %s" %
                 (sys.argv[0], exn.strerror, exn.filename))

    # Extract the spec file
    with tarfile.open(args.tarball) as tar:
        tar_root = archive_root(tar)
        extract_file(tar, os.path.join(tar_root, str(link['specfile'])),
                     args.output)

        if 'patchqueue' in link:
            patch_dir = os.path.join(tar_root, str(link['patchqueue']))
            expand_patchqueue(args, tar, os.path.join(patch_dir, 'series'))
        elif 'patches' in link:
            patch_dir = os.path.join(tar_root, str(link['patches']))
        else:
            sys.exit("%s: %s: Expected one of 'patchqueue' or 'patches'" %
                     (sys.argv[0], args.link))

        # Extract sources contained in the tarball
        spec = planex.spec.Spec(args.output, topdir=args.topdir,
                                check_package_name=args.check_package_names)
        for path, url in spec.all_sources():
            if url.netloc == '':
                if 'patchqueue' in link:
                    # trim off prefix
                    src_path = os.path.join(patch_dir,
                                            url.path[len(spec.name()) + 1:])
                else:
                    src_path = os.path.join(patch_dir, url.path)

                if src_path not in tar.getnames():
                    src_path = os.path.join(tar_root, url.path)
                extract_file(tar, src_path, path)
Code example #16
File: fetch.py Project: pombredanne/planex
def main(argv):
    """
    Main function.  Parse spec file and iterate over its sources, downloading
    them as appropriate.
    """
    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    try:
        sources = [url_for_source(args.spec, s, args.topdir,
                                  args.check_package_names)
                   for s in args.sources]
    except KeyError as exn:
        sys.exit("%s: No source corresponding to %s" % (sys.argv[0], exn))

    for path, url in sources:
        check_supported_url(url)
        if url.scheme in ["http", "https", "file"]:
            if url.scheme != "file" and args.mirror:
                if not urlparse.urlparse(args.mirror).scheme:
                    args.mirror = "file://" + args.mirror
                mpath = os.path.join(args.mirror, os.path.basename(url.path))
                url = urlparse.urlparse(mpath)

            try:
                fetch_http(url, path, args.retries + 1)

            except pycurl.error as exn:
                # Curl download failed
                sys.exit("%s: Failed to fetch %s: %s" % (sys.argv[0], urlparse.urlunparse(url), exn.args[1]))

            except IOError as exn:
                # IO error saving source file
                sys.exit("%s: %s: %s" % (sys.argv[0], exn.strerror, exn.filename))

        elif url.scheme == "" and os.path.dirname(url.path) == "":
            if not os.path.exists(path):
                sys.exit("%s: Source not found: %s" % (sys.argv[0], path))

            # Source file is pre-populated in the SOURCES directory (part of
            # the repository - probably a patch or local include).   Update
            # its timestamp to placate make, but don't try to download it.
            logging.debug("Refreshing timestamp for local source %s", path)
            os.utime(path, None)
Code example #17
File: manifest.py Project: euanh/planex
def main(argv=None):
    """Entry point."""

    args = parse_args_or_exit(argv)
    setup_logging(args)

    spec = Spec(args.specfile_path)

    link = None
    if args.lnkfile_path is not None:
        link = Link(args.lnkfile_path)

    pin = None
    pinfile = "{}/{}.pin".format(
        args.pinsdir,
        get_name(args.specfile_path, args.lnkfile_path)
        )
    if os.path.exists(pinfile):
        pin = pinfile

    manifest = generate_manifest(spec, link, pin)
    print(json.dumps(manifest, indent=4))
Code example #18
File: manifest.py Project: makunterry/planex
def main(argv=None):
    """Entry point."""

    args = parse_args_or_exit(argv)
    setup_logging(args)

    spec = Spec(args.specfile_path)

    link = None
    if args.lnkfile_path is not None:
        link = Link(args.lnkfile_path)

    pin = None
    pinfile = "{}/{}.pin".format(
        args.pinsdir,
        get_name(args.specfile_path, args.lnkfile_path)
        )
    if os.path.exists(pinfile):
        pin = pinfile

    manifest = generate_manifest(spec, link, pin)
    print(json.dumps(manifest, indent=4))
Code example #19
def main(argv):
    """
    Main function.  Fetch sources directly or via a link file.
    """
    # pylint: disable=R0914

    setup_sigint_handler()
    args = parse_args_or_exit(argv)
    setup_logging(args)

    try:
        with open(args.link) as fileh:
            link = json.load(fileh)

    except IOError as exn:
        # IO error loading JSON file
        sys.exit("%s: %s: %s" %
                 (sys.argv[0], exn.strerror, exn.filename))

    # Extract the spec file
    with tarfile.open(args.tarball) as tar:
        tar_root = archive_root(tar)
        extract_file(tar, os.path.join(tar_root, str(link['specfile'])),
                     args.output + '.tmp')

        macros = [tuple(macro.split(' ', 1)) for macro in args.define]

        if any(len(macro) != 2 for macro in macros):
            _err = [macro for macro in macros if len(macro) != 2]
            print "error: malformed macro passed to --define: %r" % _err
            sys.exit(1)

        # When using deprecated arguments, we want them at the top of the
        # macros list
        if args.topdir is not None:
            print "# warning: --topdir is deprecated"
            macros.insert(0, ('_topdir', args.topdir))

        with open(args.output, "w") as spec_fh:
            check_names = args.check_package_names
            spec = planex.spec.Spec(args.output + '.tmp',
                                    check_package_name=check_names,
                                    defines=macros)
            write_manifest(spec_fh, spec, link)
            if 'branch' in link:
                spec_fh.write("%%define branch %s\n" % link['branch'])

            if 'patchqueue' in link:
                patch_dir = os.path.join(tar_root, str(link['patchqueue']))
                expand_patchqueue(spec_fh, spec, args.output + '.tmp',
                                  tar, os.path.join(patch_dir, 'series'))
            elif 'patches' in link:
                patch_dir = os.path.join(tar_root, str(link['patches']))
                copy_spec(args.output + '.tmp', spec_fh)
            else:
                sys.exit("%s: %s: Expected one of 'patchqueue' or 'patches'" %
                         (sys.argv[0], args.link))

        # Extract sources contained in the tarball
        spec = planex.spec.Spec(args.output,
                                check_package_name=args.check_package_names,
                                defines=macros)
        for path, url in spec.all_sources():
            if url.netloc == '':
                if 'patchqueue' in link:
                    # trim off prefix
                    src_path = os.path.join(patch_dir,
                                            url.path[len(spec.name()) + 1:])
                else:
                    src_path = os.path.join(patch_dir, url.path)

                if src_path not in tar.getnames():
                    src_path = os.path.join(tar_root, url.path)
                extract_file(tar, src_path, path)