Example #1
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False):
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3
    from conda_build.index import update_index
    dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    if not config:
        config = Config()

    for path in dir_paths:
        update_index(path, config, force=force, check_md5=check_md5, remove=remove)
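A minimal call sketch for the wrapper above; the channel directory is hypothetical and all other arguments keep their defaults:

update_index(['./local-channel/linux-64'], force=True)  # hypothetical local channel dir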
Example #2
def inspect_prefix_length(packages, min_prefix_length=_prefix_length):
    from conda_build.tarcheck import check_prefix_lengths
    config = Config(prefix_length=min_prefix_length)
    packages = _ensure_list(packages)
    prefix_lengths = check_prefix_lengths(packages, config)
    if prefix_lengths:
        print("Packages with binary prefixes shorter than %d characters:" %
              min_prefix_length)
        for fn, length in prefix_lengths.items():
            print("{0} ({1} chars)".format(fn, length))
    else:
        print(
            "No packages found with binary prefixes shorter than %d characters."
            % min_prefix_length)
    return len(prefix_lengths) == 0
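A hedged usage sketch; the archive path is made up. The helper prints a report and returns True only when no package has a binary prefix shorter than the minimum:

all_ok = inspect_prefix_length(['./pkgs/example-1.0-py38_0.tar.bz2'])  # hypothetical package archive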
Example #3
def get_package_variants(recipedir_or_metadata, config=None, variants=None):
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()
    files = find_config_files(recipedir_or_metadata, ensure_list(config.variant_config_files),
                              ignore_system_config=config.ignore_system_variants,
                              exclusive_config_files=config.exclusive_config_files)

    specs = OrderedDict(internal_defaults=get_default_variant(config))

    for f in files:
        specs[f] = parse_config_file(f, config)

    # this is the override of the variants from files and args with values from CLI or env vars
    if hasattr(config, 'variant') and config.variant:
        specs['config.variant'] = config.variant
    if variants:
        specs['argument_variants'] = variants

    for f, spec in specs.items():
        try:
            validate_spec(spec)
        except ValueError as e:
            raise ValueError("Error in config {}: {}".format(f, str(e)))

    # this merges each of the specs, providing a debug message when a given setting is overridden
    #      by a later spec
    combined_spec = combine_specs(specs, log_output=config.verbose)

    extend_keys = set(ensure_list(combined_spec.get('extend_keys')))
    extend_keys.update({'zip_keys', 'extend_keys'})

    # delete the default specs, so that they don't unnecessarily limit the matrix
    specs = specs.copy()
    del specs['internal_defaults']

    combined_spec = dict_of_lists_to_list_of_dicts(combined_spec, extend_keys=extend_keys)
    for source, source_specs in reversed(list(specs.items())):
        for k, vs in source_specs.items():
            if k not in extend_keys:
                # when filtering ends up killing off all variants, we just ignore that.  Generally,
                #    this arises when a later variant config overrides, rather than selects a
                #    subspace of earlier configs
                combined_spec = (filter_by_key_value(combined_spec, k, vs, source_name=source) or
                                 combined_spec)
    return combined_spec
Example #4
    def fromdict(cls, metadata, config=None):
        """
        Create a MetaData object from metadata dict directly.
        """
        m = super(MetaData, cls).__new__(cls)
        m.path = ''
        m.meta_path = ''
        m.meta = sanitize(metadata)

        if not config:
            config = Config()

        m.config = config
        m.undefined_jinja_vars = []

        return m
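A usage sketch for the classmethod above, built around a minimal, made-up metadata dict:

meta_dict = {
    'package': {'name': 'example-pkg', 'version': '0.1.0'},  # hypothetical package
    'build': {'number': 0},
}
m = MetaData.fromdict(meta_dict)  # no config passed, so a fresh Config() is created
print(m.config.croot)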
Example #5
def execute(args):
    p, args = parse_args(args)

    config = Config()
    set_language_env_vars(args, p, config)

    metadata, _, _ = render_recipe(args.recipe,
                                   no_download_source=args.no_source,
                                   config=config)
    if args.output:
        logging.basicConfig(level=logging.ERROR)
        silence_loggers(show_warnings_and_errors=False)
        print(bldpkg_path(metadata, config=config))
    else:
        logging.basicConfig(level=logging.INFO)
        print(output_yaml(metadata, args.file))
Example #6
def build_conda_pack(base_path, tmp, hexrd_package_channel,
                     hexrdgui_output_folder):
    # First build the hexrdgui package
    recipe_path = str(base_path / '..' / 'conda.recipe')
    config = Config()
    config.channel = ['cjh1', 'anaconda', 'conda-forge']
    config.channel_urls = ['cjh1', 'anaconda', 'conda-forge']

    if hexrdgui_output_folder is not None:
        config.output_folder = hexrdgui_output_folder

    if hexrd_package_channel is not None:
        config.channel.insert(0, 'hexrd-channel')
        config.channel_urls.insert(0, hexrd_package_channel)

    config.CONDA_PY = '38'
    logger.info('Building hexrdgui conda package.')
    CondaBuild.build(recipe_path, config=config)

    logger.info('Creating new conda environment.')
    # Now create a new environment to install the package into
    env_prefix = str(tmp / package_env_name)
    Conda.run_command(Conda.Commands.CREATE, '--prefix', env_prefix,
                      'python=3.8')

    hexrdgui_output_folder_uri = Path(
        hexrdgui_output_folder).absolute().as_uri()

    logger.info('Installing hexrdgui into new environment.')
    # Install hexrdgui into new environment
    params = [
        Conda.Commands.INSTALL, '--prefix', env_prefix, '--channel',
        hexrdgui_output_folder_uri, '--channel', hexrd_package_channel,
        '--channel', 'cjh1', '--channel', 'anaconda', '--channel',
        'conda-forge', 'hexrdgui'
    ]
    Conda.run_command(*params)

    logger.info('Generating tar from environment using conda-pack.')
    # Now use conda-pack to create relocatable archive
    archive_path = str(tmp / ('hexrdgui.%s' % archive_format))
    CondaPack.pack(prefix=env_prefix,
                   output=archive_path,
                   format=archive_format)

    return archive_path
Example #7
def parse_meta_yaml(text):
    """Parse the meta.yaml.

    Parameters
    ----------
    text : str
        The raw text in conda-forge feedstock meta.yaml file

    Returns
    -------
    dict :
        The parsed YAML dict. If parsing fails, returns an empty dict.

    """

    content = render_meta_yaml(text)
    return parse(content, Config())
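A sketch of feeding the parser a tiny, hypothetical recipe body:

raw = '''
package:
  name: example  # hypothetical recipe
  version: "1.0"
'''
meta = parse_meta_yaml(raw)
print(meta.get('package', {}).get('name'))  # 'example' if parsing succeeded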
Example #8
def parsed_meta_yaml(text):
    """
    :param str text: The raw text in conda-forge feedstock meta.yaml file
    :return: `dict` -- parsed YAML dict if successful, empty dict if not
    """
    try:
        env = jinja2.Environment(undefined=NullUndefined)
        content = env.from_string(text).render(
                                os=os,
                                environ=defaultdict(lambda: ''),
                                compiler=lambda x: x + '_compiler_stub',
                                pin_subpackage=lambda *args, **kwargs: 'subpackage_stub',
                                pin_compatible=lambda *args, **kwargs: 'compatible_pin_stub',
                                cdt=lambda *args, **kwargs: 'cdt_stub',)
        return parse(content, Config())
    except Exception:
        return {}
Example #9
    def fromdict(cls, metadata, config=None, variant=None):
        """
        Create a MetaData object from metadata dict directly.
        """
        m = super(MetaData, cls).__new__(cls)
        m.path = ''
        m.meta_path = ''
        m.requirements_path = ''
        m.meta = sanitize(metadata)

        if not config:
            config = Config(variant=variant)

        m.config = config
        m.undefined_jinja_vars = []
        m.final = False

        return m
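Compared with the earlier variant-less fromdict, this version can seed the implicit Config with a variant dict; a hedged sketch with made-up values:

m = MetaData.fromdict(
    {'package': {'name': 'example-pkg', 'version': '0.1.0'}},  # hypothetical metadata
    variant={'python': '3.8'},  # hypothetical variant, forwarded to Config(variant=...)
)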
Example #10
def parse_meta_yaml(text, **kwargs):
    """Parse the meta.yaml.

    Parameters
    ----------
    text : str
        The raw text in conda-forge feedstock meta.yaml file

    Returns
    -------
    dict :
        The parsed YAML dict. If parsing fails, returns an empty dict.

    """
    from conda_build.config import Config
    from conda_build.metadata import parse

    content = render_meta_yaml(text)
    return parse(content, Config(**kwargs))
Example #11
def _get_source_code(recipe_dir):
    try:
        from conda_build.api import render
        from conda_build.config import Config
        from conda_build.source import provide

        # Use conda build to do all the downloading/extracting bits
        md = render(
            recipe_dir,
            config=Config(**CB_CONFIG),
            finalize=False,
            bypass_env_check=True,
        )
        if not md:
            return None
        md = md[0][0]
        # provide source dir
        return provide(md)
    except (SystemExit, Exception) as e:
        raise RuntimeError("conda build src exception:" + str(e))
Example #12
def _get_source_code(recipe_dir):
    from conda_build.api import render
    from conda_build.config import Config
    from conda_build.source import provide

    # Use conda build to do all the downloading/extracting bits
    md = render(
        recipe_dir,
        config=Config(**CB_CONFIG),
        finalize=False,
        bypass_env_check=True,
    )
    if not md:
        return None
    md = md[0][0]
    # provide source dir
    try:
        return provide(md)
    except SystemExit:
        raise RuntimeError(f"Could not download source for {recipe_dir}!")
Example #13
def prepare(**kwargs):
    """
    Prepare and configure the stage for mambabuild to run.

    The given **kwargs are passed to conda-build's Config which
    is the value returned by this function.
    """
    config = Config(**kwargs)
    config.channel_urls = get_channel_urls(kwargs)

    init_api_context()

    config.output_folder = os.path.abspath(config.output_folder)
    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)

    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    return config
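A call sketch; the output folder is hypothetical, and any other keyword argument is forwarded straight to conda-build's Config:

config = prepare(output_folder='./conda-bld')  # hypothetical folder; created and indexed by prepare()
print(config.output_folder)  # now an absolute path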
Example #14
def get_package_variants(recipedir_or_metadata, config=None):
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()
    files = find_config_files(
        recipedir_or_metadata,
        ensure_list(config.variant_config_files),
        ignore_system_config=config.ignore_system_variants)

    specs = get_default_variants(
        config.platform) + [parse_config_file(f, config) for f in files]

    target_platform_default = [{'target_platform': config.subdir}]
    # this is the override of the variants from files and args with values from CLI or env vars
    if config.variant:
        combined_spec, extend_keys = combine_specs(target_platform_default +
                                                   specs + [config.variant])
    else:
        # this tweaks behavior from clobbering to appending/extending
        combined_spec, extend_keys = combine_specs(target_platform_default +
                                                   specs)

    # clobber the variant with anything in the config (stuff set via CLI flags or env vars)
    for k, v in config.variant.items():
        if k in extend_keys:
            if hasattr(combined_spec[k], 'keys'):
                combined_spec[k].update(v)
            else:
                combined_spec[k].extend(v)
        elif k == 'zip_keys':
            combined_spec[k].extend(v)
            combined_spec[k] = list(
                list(set_group) for set_group in set(
                    tuple(group) for group in combined_spec[k]))
        else:
            combined_spec[k] = [v]

    validate_variant(combined_spec)
    return dict_of_lists_to_list_of_dicts(combined_spec, config.platform)
Example #15
def testing_config(testing_workdir):
    def boolify(v):
        return v == 'true'
    result = Config(croot=testing_workdir, anaconda_upload=False, verbose=True,
                    activate=False, debug=False, variant=None, test_run_post=False,
                    # These bits ensure that default values are used instead of any
                    # present in ~/.condarc
                    filename_hashing=filename_hashing_default,
                    _src_cache_root=_src_cache_root_default,
                    error_overlinking=boolify(error_overlinking_default),
                    error_overdepending=boolify(error_overdepending_default),
                    noarch_python_build_age=noarch_python_build_age_default,
                    enable_static=boolify(enable_static_default),
                    no_rewrite_stdout_env=boolify(no_rewrite_stdout_env_default),
                    ignore_verify_codes=ignore_verify_codes_default,
                    exit_on_verify_error=exit_on_verify_error_default,
                    conda_pkg_format=conda_pkg_format_default)
    assert result.no_rewrite_stdout_env is False
    assert result._src_cache_root is None
    assert result.src_cache_root == testing_workdir
    assert result.noarch_python_build_age == 0
    return result
Example #16
def get_package_combined_spec(recipedir_or_metadata,
                              config=None,
                              variants=None):
    # outputs a tuple of (combined_spec_dict_of_lists, used_spec_file_dict)
    #
    # The output of this function is order preserving, unlike get_package_variants
    if hasattr(recipedir_or_metadata, 'config'):
        config = recipedir_or_metadata.config
    if not config:
        from conda_build.config import Config
        config = Config()
    files = find_config_files(
        recipedir_or_metadata,
        ensure_list(config.variant_config_files),
        ignore_system_config=config.ignore_system_variants,
        exclusive_config_files=config.exclusive_config_files)

    specs = OrderedDict(internal_defaults=get_default_variant(config))

    for f in files:
        specs[f] = parse_config_file(f, config)

    # this is the override of the variants from files and args with values from CLI or env vars
    if hasattr(config, 'variant') and config.variant:
        specs['config.variant'] = config.variant
    if variants:
        specs['argument_variants'] = variants

    for f, spec in specs.items():
        try:
            validate_spec(spec)
        except ValueError as e:
            raise ValueError("Error in config {}: {}".format(f, str(e)))

    # this merges each of the specs, providing a debug message when a given setting is overridden
    #      by a later spec
    combined_spec = combine_specs(specs, log_output=config.verbose)
    return combined_spec, specs
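A hedged sketch of consuming the tuple this function returns; the recipe directory is made up:

combined_spec, used_spec_files = get_package_combined_spec('./my-recipe')  # hypothetical recipe dir
print(list(used_spec_files))  # ordered spec sources, starting with 'internal_defaults'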
Example #17
def create_metapackage(name,
                       version,
                       entry_points=(),
                       build_string=None,
                       build_number=0,
                       dependencies=(),
                       home=None,
                       license_name=None,
                       summary=None,
                       config=None):
    from .metapackage import create_metapackage
    if not config:
        config = Config()
    return create_metapackage(name=name,
                              version=version,
                              entry_points=entry_points,
                              build_string=build_string,
                              build_number=build_number,
                              dependencies=dependencies,
                              home=home,
                              license_name=license_name,
                              summary=summary,
                              config=config)
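A usage sketch with made-up package details; everything except name and version keeps its default. Note that this actually runs a build of the metapackage:

create_metapackage(
    name='my-meta',  # hypothetical metapackage name
    version='1.0',
    dependencies=('python >=3.8', 'requests'),  # carried as run requirements
    summary='Hypothetical metapackage example',
)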
Example #18
def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0,
                       dependencies=(), home=None, license_name=None, summary=None, config=None):
    # local import to avoid a circular import; we provide create_metapackage in api
    from conda_build.build import build

    if not config:
        config = Config()

    d = defaultdict(dict)
    d['package']['name'] = name
    d['package']['version'] = version
    d['build']['number'] = build_number
    d['build']['entry_points'] = entry_points
    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = build_string
    d['requirements']['run'] = dependencies
    d['about']['home'] = home
    d['about']['license'] = license_name
    d['about']['summary'] = summary
    d = dict(d)
    m = MetaData.fromdict(d, config=config)
    config.compute_build_id(m.name())

    return build(m, config=config, need_source_download=False)
Example #19
def parse_meta_yaml(text: str,
                    for_pinning=False,
                    **kwargs: Any) -> "MetaYamlTypedDict":
    """Parse the meta.yaml.

    Parameters
    ----------
    text : str
        The raw text in conda-forge feedstock meta.yaml file

    Returns
    -------
    dict :
        The parsed YAML dict. If parsing fails, returns an empty dict.

    """
    from conda_build.config import Config
    from conda_build.metadata import parse

    if for_pinning:
        content = render_meta_yaml(text, for_pinning=for_pinning)
    else:
        content = render_meta_yaml(text)
    return parse(content, Config(**kwargs))
Example #20
def testing_index(request):
    index, index_ts = get_build_index(config=Config(debug=False,
                                                    verbose=False),
                                      subdir=subdir,
                                      clear_cache=True)
    return index
Example #21
def test_long_test_prefix(additional_args, is_long_test_prefix):
    args = ['non_existing_recipe'] + additional_args
    parser, args = main_build.parse_args(args)
    config = Config(**args.__dict__)
    assert config.long_test_prefix is is_long_test_prefix
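The same Config(**args.__dict__) pattern outside the test, as a sketch; the recipe path is made up, and --no-long-test-prefix is assumed to be one of the parametrized flags:

parser, args = main_build.parse_args(['some-recipe', '--no-long-test-prefix'])  # hypothetical args
config = Config(**args.__dict__)
assert config.long_test_prefix is False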
Example #22
def skeletonize(packages,
                output_dir=".",
                version=None,
                git_tag=None,
                cran_url="https://cran.r-project.org/",
                recursive=False,
                archive=True,
                version_compare=False,
                update_outdated=False,
                config=None):

    if not config:
        config = Config()

    if len(packages) > 1 and version_compare:
        raise ValueError(
            "--version-compare only works with one package at a time")
    if not update_outdated and not packages:
        raise ValueError("At least one package must be supplied")

    package_dicts = {}

    cran_metadata = get_cran_metadata(cran_url, output_dir)

    if update_outdated:
        packages = get_outdated(output_dir, cran_metadata, packages)
        for pkg in packages:
            rm_rf(join(output_dir, 'r-' + pkg))

    while packages:
        package = packages.pop()

        is_github_url = 'github.com' in package
        url = package

        if is_github_url:
            rm_rf(config.work_dir)
            m = metadata.MetaData.fromdict({'source': {
                'git_url': package
            }},
                                           config=config)
            source.git_source(m.get_section('source'), m.config.git_cache,
                              m.config.work_dir)
            git_tag = git_tag[0] if git_tag else get_latest_git_tag(config)
            p = subprocess.Popen(['git', 'checkout', git_tag],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 cwd=config.work_dir)
            stdout, stderr = p.communicate()
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')
            if p.returncode:
                sys.exit(
                    "Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                    (git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            DESCRIPTION = join(config.work_dir, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(config.work_dir, 'pkg',
                                           "DESCRIPTION")
                sub_description_name = join(config.work_dir,
                                            package.split('/')[-1],
                                            "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit(
                        "%s does not appear to be a valid R package "
                        "(no DESCRIPTION file in %s, %s)" %
                        (package, sub_description_pkg, sub_description_name))

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(
                remove_package_line_continuations(
                    description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d

        if package.startswith('r-'):
            package = package[2:]
        if package.endswith('/'):
            package = package[:-1]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure the package name always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(cran_url, package, session))

        dir_path = join(output_dir, 'r-' + package.lower())
        if exists(dir_path) and not version_compare:
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(
            package,
            {
                'cran_packagename': package,
                'packagename': 'r-' + package.lower(),
                'build_depends': '',
                'run_depends': '',
                # CRAN doesn't seem to have this metadata :(
                'home_comment': '#',
                'homeurl': '',
                'summary_comment': '#',
                'summary': '',
            })

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['hash_entry'] = '# You can add a hash for the file here, like md5, sha1 or sha256'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = git_tag
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''
            d['hash_entry'] = ''

        if version:
            d['version'] = version
            raise NotImplementedError(
                "Package versions from CRAN are not yet implemented")

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        if not is_github_url:
            filename = '{}_{}.tar.gz'
            contrib_url = cran_url + 'src/contrib/'
            package_url = contrib_url + filename.format(
                package, d['cran_version'])

            # calculate sha256 by downloading source
            sha256 = hashlib.sha256()
            print("Downloading source from {}".format(package_url))
            sha256.update(urlopen(package_url).read())
            d['hash_entry'] = 'sha256: {}'.format(sha256.hexdigest())

            d['filename'] = filename.format(package, '{{ version }}')
            if archive:
                d['cranurl'] = (INDENT + contrib_url + d['filename'] + INDENT +
                                contrib_url + 'Archive/{}/'.format(package) +
                                d['filename'])
            else:
                d['cranurl'] = ' ' + cran_url + 'src/contrib/' + d['filename']

        d['cran_metadata'] = '\n'.join(
            ['# %s' % l for l in cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        d['license_family'] = guess_license_family(d['license'],
                                                   allowed_license_families)

        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use') == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])
        else:
            # use CRAN page as homepage if nothing has been specified
            d['home_comment'] = ''
            d['homeurl'] = ' https://CRAN.R-project.org/package={}'.format(
                package)

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [
            s.strip() for s in cran_package.get('Depends', '').split(',')
            if s.strip()
        ]
        imports = [
            s.strip() for s in cran_package.get('Imports', '').split(',')
            if s.strip()
        ]
        links = [
            s.strip() for s in cran_package.get("LinkingTo", '').split(',')
            if s.strip()
        ]

        dep_dict = {}

        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" %
                         (package, s))
            name = match.group('name')
            archs = match.group('archs')
            relop = match.group('relop') or ''
            version = match.group('version') or ''
            version = version.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or version

            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))

            dep_dict[name] = '{relop}{version}'.format(relop=relop,
                                                       version=version)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        for dep_type in ['build', 'run']:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first
                    # Regardless of build or run, and whether this is a recommended package or not,
                    # it can only depend on 'r-base' since anything else can and will cause cycles
                    # in the dependency graph. The cran metadata lists all dependencies anyway, even
                    # those packages that are in the recommended group.
                    r_name = 'r-base'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version
                    deps.insert(
                        0, '{indent}{r_name}'.format(indent=INDENT,
                                                     r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()

                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}'.format(
                            name=conda_name,
                            version=dep_dict[name],
                            indent=INDENT))
                    else:
                        deps.append('{indent}{name}'.format(name=conda_name,
                                                            indent=INDENT))
                    if recursive:
                        if not exists(join(output_dir, conda_name)):
                            packages.append(name)

            if cran_package.get("NeedsCompilation", 'no') == 'yes':
                if dep_type == 'build':
                    deps.append('{indent}posix                # [win]'.format(
                        indent=INDENT))
                    deps.append(
                        '{indent}{{{{native}}}}toolchain  # [win]'.format(
                            indent=INDENT))
                    deps.append(
                        '{indent}gcc                  # [not win]'.format(
                            indent=INDENT))
                elif dep_type == 'run':
                    deps.append(
                        '{indent}{{{{native}}}}gcc-libs   # [win]'.format(
                            indent=INDENT))
                    deps.append(
                        '{indent}libgcc               # [not win]'.format(
                            indent=INDENT))
            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']

        # Normalize the metadata values
        d = {
            k: unicodedata.normalize("NFKD",
                                     text_type(v)).encode('ascii',
                                                          'ignore').decode()
            for k, v in iteritems(d)
        }

        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")
Example #23
def skeletonize(packages,
                output_dir=".",
                version=None,
                recursive=False,
                all_urls=False,
                pypi_url='https://pypi.io/pypi/',
                noprompt=True,
                version_compare=False,
                python_version=default_python,
                manual_url=False,
                all_extras=False,
                noarch_python=False,
                config=None,
                setup_options=None,
                extra_specs=[],
                pin_numpy=False):
    package_dicts = {}

    if not setup_options:
        setup_options = []

    if isinstance(setup_options, string_types):
        setup_options = [setup_options]

    if not config:
        config = Config()

    created_recipes = []
    while packages:
        package = packages.pop()
        created_recipes.append(package)

        is_url = ':' in package

        if is_url:
            package_pypi_url = ''
        else:
            package_pypi_url = urljoin(pypi_url, '/'.join((package, 'json')))

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path) and not version_compare:
                raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(
            package, {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'test_commands': '',
                'tests_require': '',
            })
        if is_url:
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
            # Make sure there is always something to pass in for this
            pypi_data = {}
        else:
            sort_by_version = lambda l: sorted(l, key=parse_version)

            pypi_resp = requests.get(package_pypi_url,
                                     verify=not _ssl_no_verify())

            if pypi_resp.status_code != 200:
                sys.exit("Request to fetch %s failed with status: %d" %
                         (package_pypi_url, pypi_resp.status_code))

            pypi_data = pypi_resp.json()

            versions = sort_by_version(pypi_data['releases'].keys())

            if version_compare:
                version_compare(versions)
            if version:
                if version not in versions:
                    sys.exit(
                        "Error: Version %s of %s is not available on PyPI." %
                        (version, package))
                d['version'] = version
            else:
                # select the most visible version from PyPI.
                if not versions:
                    sys.exit(
                        "Error: Could not find any versions of package %s" %
                        package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" %
                          package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[-1])
                    print("Use --version to specify a different version.")
                d['version'] = versions[-1]

        data, d['pypiurl'], d['filename'], d['digest'] = get_download_data(
            pypi_data, package, d['version'], is_url, all_urls, noprompt,
            manual_url)

        d['import_tests'] = ''

        # Get summary directly from the metadata returned
        # from PyPI. summary will be pulled from package information in
        # get_package_metadata or a default value set if it turns out that
        # data['summary'] is empty.  Ignore description as it is too long.
        d['summary'] = data.get('summary', '')
        get_package_metadata(package,
                             d,
                             data,
                             output_dir,
                             python_version,
                             all_extras,
                             recursive,
                             created_recipes,
                             noarch_python,
                             noprompt,
                             packages,
                             extra_specs,
                             config=config,
                             setup_options=setup_options)

        # Set these *after* get_package_metadata so that the preferred hash
        # can be calculated from the downloaded file, if necessary.
        d['hash_type'] = d['digest'][0]
        d['hash_value'] = d['digest'][1]

        # Change requirements to use format that guarantees the numpy
        # version will be pinned when the recipe is built and that
        # the version is included in the build string.
        if pin_numpy:
            for depends in ['build_depends', 'run_depends']:
                deps = d[depends]
                numpy_dep = [
                    idx for idx, dep in enumerate(deps) if 'numpy' in dep
                ]
                if numpy_dep:
                    # Turns out this needs to be inserted before the rest
                    # of the numpy spec.
                    deps.insert(numpy_dep[0], 'numpy x.x')
                    d[depends] = deps

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            rendered_recipe = PYPI_META_HEADER.format(**d)

            ordered_recipe = OrderedDict()
            # Create all keys in the expected order
            for key in EXPECTED_SECTION_ORDER:
                try:
                    ordered_recipe[key] = PYPI_META_STATIC[key]
                except KeyError:
                    ordered_recipe[key] = OrderedDict()

            if '://' not in pypi_url:
                raise ValueError(
                    "pypi_url must have protocol (e.g. http://) included")
            base_url = urlsplit(pypi_url)
            base_url = "://".join((base_url.scheme, base_url.netloc))
            ordered_recipe['source']['url'] = urljoin(
                base_url, ordered_recipe['source']['url'])
            ordered_recipe['source']['sha256'] = d['hash_value']

            if d['entry_points']:
                ordered_recipe['build']['entry_points'] = d['entry_points']

            if noarch_python:
                ordered_recipe['build']['noarch'] = 'python'

            ordered_recipe['build']['script'] = (
                '"{{ PYTHON }} -m pip install . --no-deps '
                '--ignore-installed --no-cache-dir -vvv ' +
                ' '.join(setup_options) + '"')

            # Always require python as a dependency.  Pip is included because
            #    we use pip for the install line.
            ordered_recipe['requirements'] = OrderedDict()
            ordered_recipe['requirements']['host'] = sorted(
                ['python', 'pip'] + list(set(d['build_depends'])))
            ordered_recipe['requirements']['run'] = sorted(
                ['python'] + list(set(d['run_depends'])))

            if d['import_tests']:
                ordered_recipe['test']['imports'] = d['import_tests']

            if d['test_commands']:
                ordered_recipe['test']['commands'] = d['test_commands']

            if d['tests_require']:
                ordered_recipe['test']['requires'] = d['tests_require']

            ordered_recipe['about'] = OrderedDict()

            for key in ABOUT_ORDER:
                try:
                    ordered_recipe['about'][key] = d[key]
                except KeyError:
                    ordered_recipe['about'][key] = ''
            ordered_recipe['extra']['recipe-maintainers'] = [
                'your-github-id-here'
            ]

            # Prune any top-level sections that are empty
            rendered_recipe += _print_dict(ordered_recipe,
                                           EXPECTED_SECTION_ORDER)

            # make sure that recipe ends with one newline, by god.
            rendered_recipe = rendered_recipe.rstrip()

            # This hackery is necessary because
            #  - the default indentation of lists is not what we would like.
            #    Ideally we'd contact the ruamel.yaml author to find the right
            #    way to do this. See this PR thread for more:
            #    https://github.com/conda/conda-build/pull/2205#issuecomment-315803714
            #    Brute force fix below.

            # Fix the indents
            recipe_lines = []
            for line in rendered_recipe.splitlines():
                match = re.search(r'^\s+(-) ', line, flags=re.MULTILINE)
                if match:
                    pre, sep, post = line.partition('-')
                    sep = '  ' + sep
                    line = pre + sep + post
                recipe_lines.append(line)
            rendered_recipe = '\n'.join(recipe_lines) + '\n'

            f.write(rendered_recipe)
Example #24
def create_migration_yaml_creator(migrators: MutableSequence[Migrator], gx: nx.DiGraph):
    with indir(os.environ["CONDA_PREFIX"]):
        pinnings = parse_config_file(
            "conda_build_config.yaml", config=Config(**CB_CONFIG),
        )
    feedstocks_to_be_repinned = []
    for k, package_pin_list in pinnings.items():
        # we need the package names for the migrator itself but need the
        # feedstock for everything else
        package_name = k
        # exclude non-package keys
        if k not in gx.nodes and k not in gx.graph["outputs_lut"]:
            # conda_build_config.yaml can't have `-` unlike our package names
            k = k.replace("_", "-")
        # replace sub-packages with their feedstock names
        k = gx.graph["outputs_lut"].get(k, k)

        if (
            (k in gx.nodes)
            and not gx.nodes[k]["payload"].get("archived", False)
            and gx.nodes[k]["payload"].get("version")
            and k not in feedstocks_to_be_repinned
        ):

            current_pins = list(map(str, package_pin_list))
            current_version = str(gx.nodes[k]["payload"]["version"])

            # we need a special parsing for pinning stuff
            meta_yaml = parse_meta_yaml(
                gx.nodes[k]["payload"]["raw_meta_yaml"], for_pinning=True,
            )

            # find the most stringent max pin for this feedstock if any
            pin_spec = ""
            for block in [meta_yaml] + meta_yaml.get("outputs", []) or []:
                build = block.get("build", {}) or {}
                # and check the exported package is within the feedstock
                exports = [
                    p.get("max_pin", "")
                    for p in build.get("run_exports", [{}])
                    # make certain not direct hard pin
                    if isinstance(p, MutableMapping)
                    # if the pinned package is in an output of the parent feedstock
                    and (
                        gx.graph["outputs_lut"].get(p.get("package_name", ""), "") == k
                        # if the pinned package is the feedstock itself
                        or p.get("package_name", "") == k
                    )
                ]
                if not exports:
                    continue
                # get the most stringent pin spec from the recipe block
                max_pin = max(exports, key=len)
                if len(max_pin) > len(pin_spec):
                    pin_spec = max_pin

            # fall back to the pinning file or "x"
            if not pin_spec:
                pin_spec = (
                    pinnings["pin_run_as_build"].get(k, {}).get("max_pin", "x") or "x"
                )

            current_pins = list(
                map(lambda x: re.sub("[^0-9.]", "", x).rstrip("."), current_pins),
            )
            current_version = re.sub("[^0-9.]", "", current_version).rstrip(".")
            if current_pins == [""]:
                continue

            current_pin = str(max(map(VersionOrder, current_pins)))
            # If the current pin and the current version are the same, there is
            # nothing to do, even if the pin isn't accurate to the spec
            if current_pin != current_version and _outside_pin_range(
                pin_spec, current_pin, current_version,
            ):
                feedstocks_to_be_repinned.append(k)
                print(package_name, current_version, current_pin, pin_spec)
                migrators.append(
                    MigrationYamlCreator(
                        package_name, current_version, current_pin, pin_spec, k, gx,
                    ),
                )
Example #25
def config():
    """a tiny bit of a fixture to save us from manually creating a new Config each test"""
    return Config()
Example #26
def skeletonize(packages, output_dir=".", version=None, recursive=False,
                all_urls=False, pypi_url='https://pypi.python.org/pypi', noprompt=False,
                version_compare=False, python_version=default_python, manual_url=False,
                all_extras=False, noarch_python=False, config=None, setup_options=None,
                pin_numpy=False):
    client = get_xmlrpc_client(pypi_url)
    package_dicts = {}

    if not setup_options:
        setup_options = []

    if isinstance(setup_options, string_types):
        setup_options = [setup_options]

    if not config:
        config = Config()

    # all_packages = client.list_packages()
    # searching is faster than listing all packages, but we need to separate URLs from names
    all_packages = []

    urls = [package for package in packages if ':' in package]
    names = [package for package in packages if package not in urls]
    all_packages = urls + [match["name"] for match in client.search({"name": names}, "or")]
    all_packages_lower = [i.lower() for i in all_packages]

    created_recipes = []
    while packages:
        package = packages.pop()
        created_recipes.append(package)

        is_url = ':' in package

        if not is_url:
            dir_path = join(output_dir, package.lower())
            if exists(dir_path) and not version_compare:
                raise RuntimeError("directory already exists: %s" % dir_path)
        d = package_dicts.setdefault(package,
            {
                'packagename': package.lower(),
                'run_depends': '',
                'build_depends': '',
                'entry_points': '',
                'build_comment': '# ',
                'noarch_python_comment': '# ',
                'test_commands': '',
                'requires_comment': '#',
                'tests_require': '',
                'usemd5': '',
                'test_comment': '',
                'entry_comment': '# ',
                'egg_comment': '# ',
                'summary_comment': '',
                'home_comment': '',
            })
        if is_url:
            del d['packagename']

        if is_url:
            d['version'] = 'UNKNOWN'
        else:
            versions = sorted(client.package_releases(package, True), key=parse_version)
            if version_compare:
                version_compare(versions)
            if version:
                if version not in versions:
                    sys.exit("Error: Version %s of %s is not available on PyPI."
                             % (version, package))
                d['version'] = version
            else:
                if not versions:
                    # The xmlrpc interface is case sensitive, but the index itself
                    # is apparently not (the last time I checked,
                    # len(set(all_packages_lower)) == len(set(all_packages)))
                    if package.lower() in all_packages_lower:
                        cased_package = all_packages[all_packages_lower.index(package.lower())]
                        if cased_package != package:
                            print("%s not found, trying %s" % (package, cased_package))
                            packages.append(cased_package)
                            del package_dicts[package]
                            continue
                    sys.exit("Error: Could not find any versions of package %s" % package)
                if len(versions) > 1:
                    print("Warning, the following versions were found for %s" %
                          package)
                    for ver in versions:
                        print(ver)
                    print("Using %s" % versions[-1])
                    print("Use --version to specify a different version.")
                d['version'] = versions[-1]

        data, d['pypiurl'], d['filename'], d['md5'] = get_download_data(client,
                                                                        package,
                                                                        d['version'],
                                                                        is_url, all_urls,
                                                                        noprompt, manual_url)

        if d['md5'] == '':
            d['usemd5'] = '# '
        else:
            d['usemd5'] = ''

        d['import_tests'] = ''

        get_package_metadata(package, d, data, output_dir, python_version,
                             all_extras, recursive, created_recipes, noarch_python,
                             noprompt, packages, config=config, setup_options=setup_options)

        if d['import_tests'] == '':
            d['import_comment'] = '# '
        else:
            d['import_comment'] = ''
            d['import_tests'] = INDENT + d['import_tests']

        if d['tests_require'] == '':
            d['requires_comment'] = '# '
        else:
            d['requires_comment'] = ''
            d['tests_require'] = INDENT + d['tests_require']

        if d['entry_comment'] == d['import_comment'] == '# ':
            d['test_comment'] = '# '

        d['recipe_setup_options'] = ' '.join(setup_options)

        # Change requirements to use format that guarantees the numpy
        # version will be pinned when the recipe is built and that
        # the version is included in the build string.
        if pin_numpy:
            for depends in ['build_depends', 'run_depends']:
                deps = d[depends].split(INDENT)
                numpy_dep = [idx for idx, dep in enumerate(deps)
                             if 'numpy' in dep]
                if numpy_dep:
                    # Turns out this needs to be inserted before the rest
                    # of the numpy spec.
                    deps.insert(numpy_dep[0], 'numpy x.x')
                    d[depends] = INDENT.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(PYPI_META.format(**d))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(PYPI_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(PYPI_BLD_BAT.format(**d))
Example #27
def execute(args):
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    config.channel_urls = args.channel or ()
    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            recipe_dir, need_cleanup = get_recipe_abspath(recipe)

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            # this fully renders any jinja templating, throwing an error if any data is missing
            m, _, _ = render_recipe(recipe_dir,
                                    no_download_source=False,
                                    config=config)
            action(m, config)

            if need_cleanup:
                rm_rf(recipe_dir)
    else:
        api.build(args.recipe,
                  post=args.post,
                  build_only=args.build_only,
                  notest=args.notest,
                  keep_old_work=args.keep_old_work,
                  already_built=None,
                  config=config,
                  noverify=args.no_verify)

    if not args.output and len(build.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
Example #28
    elif 'hg_url' in meta:
        hg_source(meta, config=config)
    elif 'svn_url' in meta:
        svn_source(meta, config=config)
    elif 'path' in meta:
        path = normpath(abspath(join(recipe_dir, meta.get('path'))))
        if config.verbose:
            print("Copying %s to %s" % (path, config.work_dir))
        # careful here: we set test path to be outside of conda-build root in setup.cfg.
        #    If you don't do that, this is a recursive function
        copy_into(path, config.work_dir, config.timeout)
    else:  # no source
        if not isdir(config.work_dir):
            os.makedirs(config.work_dir)

    if patch:
        src_dir = config.work_dir
        patches = ensure_list(meta.get('patches', []))
        for patch in patches:
            apply_patch(src_dir, join(recipe_dir, patch), config, git)

    return config.work_dir


if __name__ == '__main__':
    from conda_build.config import Config
    print(provide('.',
                  {'url': 'http://pypi.python.org/packages/source/b/bitarray/bitarray-0.8.0.tar.gz',
                   'git_url': '[email protected]:ilanschnell/bitarray.git',
                   'git_tag': '0.5.2'}), Config())
Example #29
def execute(args):
    _parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    channel_urls = args.__dict__.get('channel') or args.__dict__.get(
        'channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(
                    os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    action = None
    outputs = None
    if args.output:
        action = output_action
        config.verbose = False
        config.quiet = True
        config.debug = False
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action == test_action:
        failed_recipes = []
        recipes = [
            item for sublist in [
                glob(os.path.abspath(recipe)) if '*' in recipe else [recipe]
                for recipe in args.recipe
            ] for item in sublist
        ]
        for recipe in recipes:
            try:
                action(recipe, config)
            except:
                if not args.keep_going:
                    raise
                else:
                    failed_recipes.append(recipe)
                    continue
        if failed_recipes:
            print("Failed recipes:")
            for recipe in failed_recipes:
                print("  - %s" % recipe)
            sys.exit(len(failed_recipes))
        else:
            print("All tests passed")
        outputs = []

    elif action:
        outputs = [action(recipe, config) for recipe in args.recipe]
    else:
        outputs = api.build(args.recipe,
                            post=args.post,
                            build_only=args.build_only,
                            notest=args.notest,
                            already_built=None,
                            config=config,
                            verify=args.verify,
                            variants=args.variants)

    if not args.output and len(utils.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
    return outputs
Example #30
import os
import shutil
from conda_build.config import Config

build_dir = Config().bldpkgs_dir
dest_dir = 'dist'

if not os.path.exists(dest_dir):
    os.mkdir(dest_dir)

for p in os.listdir(build_dir):
    if not p.endswith('.tar.bz2'):
        continue

    source = os.path.join(build_dir, p)
    print('copying %s' % source)
    shutil.copyfile(source, os.path.join(dest_dir, p))