Example #1
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source)

    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            config.verbose = False
            config.debug = False
            paths = api.get_output_file_paths(metadata_tuples, config=config)
            print('\n'.join(sorted(paths)))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
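A minimal, hypothetical driver for the execute() entry point above; the recipe path and flags are placeholders, and execute is assumed to be importable from this module.

if __name__ == '__main__':
    # render a local recipe and print only its output package paths (hypothetical args)
    execute(['./my-recipe', '--output'])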
Example #2
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True, expand_output=False):
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    rendered_metadata = {}

    if m.final:
        rendered_metadata = [(m, False, False), ]
        index = None
    else:
        variants = (dict_of_lists_to_list_of_dicts(variants, m.config.platform)
                    if variants else get_package_variants(m, m.config))
        index = get_build_index(m.config, m.config.build_subdir)
        rendered_metadata = distribute_variants(
            m, variants, index,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        if not rendered_metadata:
            raise ValueError("No variants were satisfiable - no valid recipes could be rendered.")

    if expand_output:
        rendered_metadata = expand_outputs(rendered_metadata, index)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata, index
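A short sketch of consuming this version's return value, which pairs the metadata tuples with the build index; the recipe path is a placeholder and config is assumed to be a conda-build Config.

metadata_tuples, index = render_recipe('./my-recipe', config=config)
for m, needs_download, needs_render_in_env in metadata_tuples:
    # one tuple per variant; m is a MetaData object
    print(m.name(), m.version())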
Example #3
def execute(args, print_results=True):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)

    variants = get_package_variants(args.recipe,
                                    config,
                                    variants=args.variants)
    set_language_env_vars(variants)

    channel_urls = args.__dict__.get('channel') or args.__dict__.get(
        'channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(
                    os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels

    if args.output:
        config.verbose = False
        config.debug = False

    metadata_tuples = api.render(args.recipe,
                                 config=config,
                                 no_download_source=args.no_source,
                                 variants=args.variants)

    if print_results:
        if args.output:
            with LoggingContext(logging.CRITICAL + 1):
                paths = api.get_output_file_paths(metadata_tuples,
                                                  config=config)
                print('\n'.join(sorted(paths)))
        else:
            logging.basicConfig(level=logging.INFO)
            for (m, _, _) in metadata_tuples:
                print("--------------")
                print("Hash contents:")
                print("--------------")
                pprint(m.get_hash_contents())
                print("----------")
                print("meta.yaml:")
                print("----------")
                print(api.output_yaml(m, args.file, suppress_outputs=True))
    else:
        return metadata_tuples
Example #4
def execute(args, print_results=True):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)

    variants = get_package_variants(args.recipe,
                                    config,
                                    variants=args.variants)
    set_language_env_vars(variants)

    config.channel_urls = get_channel_urls(args.__dict__)

    config.override_channels = args.override_channels

    if args.output:
        config.verbose = False
        config.debug = False

    metadata_tuples = api.render(args.recipe,
                                 config=config,
                                 no_download_source=args.no_source,
                                 variants=args.variants)

    if args.file and len(metadata_tuples) > 1:
        log.warning(
            "Multiple variants rendered. "
            "Only one will be written to the file you specified ({}).".format(
                args.file))

    if print_results:
        if args.output:
            with LoggingContext(logging.CRITICAL + 1):
                paths = api.get_output_file_paths(metadata_tuples,
                                                  config=config)
                print('\n'.join(sorted(paths)))
            if args.file:
                m = metadata_tuples[-1][0]
                api.output_yaml(m, args.file, suppress_outputs=True)
        else:
            logging.basicConfig(level=logging.INFO)
            for (m, _, _) in metadata_tuples:
                print("--------------")
                print("Hash contents:")
                print("--------------")
                pprint(m.get_hash_contents())
                print("----------")
                print("meta.yaml:")
                print("----------")
                print(api.output_yaml(m, args.file, suppress_outputs=True))
    else:
        return metadata_tuples
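A hypothetical invocation of this version, printing output paths and also writing the rendered meta.yaml of the last variant to a file; the exact flag spellings depend on parse_args and are an assumption here.

execute(['./my-recipe', '--output', '--file', 'rendered-meta.yaml'])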
Example #5
def get_deps_and_metadata(path, python=None, numpy=None):
    """
    Extract all dependencies from the recipe in path.

    Parameters
    ----------
    path: string
        The absolute path to the recipe dir
    python: list, optional
        A list of python version strings, e.g. ['3.6']
    numpy: list, optional
        A list of numpy version strings, e.g. ['1.14']

    Returns
    -------
    deps: list
        A list of MetaData objects, one per rendered variant
    """
    # This is how different python and numpy versions are supported internally
    # in conda-build. Note that in practice "numpy" here can only be a version
    # string or a one-element list, matching the old implementation (and
    # conda build --numpy ...).
    variants = {}
    if python is not None:
        variants['python'] = python
    if numpy is not None:
        variants['numpy'] = numpy
    if variants == {}:
        variants = None

    # we want "--old-build-string" for conda build
    # this is how it's done internally
    config = get_or_merge_config(None)
    config.filename_hashing = False

    # allow_no_other_outputs must be set so that the outputs section does not
    # get expanded too early, which would otherwise trigger the "bug in
    # conda-build" error. The result is a list of 3-tuples.
    meta = MetaData(path, config=config)
    var = get_package_variants(meta, variants=variants)
    result = distribute_variants(meta, var, allow_no_other_outputs=True)

    # each tuple is (metadata, needs_download, needs_render_in_env);
    # only the metadata objects are needed here
    deps = [meta for meta, _, _ in result]

    return deps
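A hypothetical call, with a placeholder recipe path and version pins:

metas = get_deps_and_metadata('/path/to/recipe', python=['3.6'], numpy=['1.14'])
for m in metas:
    print(m.name(), m.version())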
Example #6
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source)
    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            paths = api.get_output_file_path(metadata_tuples)
            print('\n'.join(paths))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
Example #7
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source)

    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            paths = api.get_output_file_paths(metadata_tuples)
            print('\n'.join(sorted(paths)))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
Example #8
def execute(args, print_results=True):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)

    variants = get_package_variants(args.recipe, config, variants=args.variants)
    set_language_env_vars(variants)

    config.channel_urls = get_channel_urls(args.__dict__)

    config.override_channels = args.override_channels

    if args.output:
        config.verbose = False
        config.debug = False

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source,
                                 variants=args.variants)

    if print_results:
        if args.output:
            with LoggingContext(logging.CRITICAL + 1):
                paths = api.get_output_file_paths(metadata_tuples, config=config)
                print('\n'.join(sorted(paths)))
        else:
            logging.basicConfig(level=logging.INFO)
            for (m, _, _) in metadata_tuples:
                print("--------------")
                print("Hash contents:")
                print("--------------")
                pprint(m.get_hash_contents())
                print("----------")
                print("meta.yaml:")
                print("----------")
                print(api.output_yaml(m, args.file, suppress_outputs=True))
    else:
        return metadata_tuples
Example #9
def render_recipe(recipe_path,
                  config,
                  no_download_source=False,
                  variants=None,
                  permit_unsatisfiable_variants=True,
                  reset_build_id=True,
                  bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant. Outputs are not factored in here
    (subpackages won't affect the results returned here).
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source goes into a different
    #    build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build folder.
    #   There's no way around it AFAICT.  We must download the source to be able to render
    #   the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
    #   folder until rendering is complete, because package names can have variant jinja2 in them.
    if m.needs_source_for_render and not m.source_provided:
        try_download(m, no_download_source=no_download_source)
    if m.final:
        if not hasattr(m.config, 'variants') or not m.config.variant:
            m.config.ignore_system_variants = True
            if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')):
                m.config.variant_config_files = [
                    os.path.join(m.path, 'conda_build_config.yaml')
                ]
            m.config.variants = get_package_variants(m, variants=variants)
            m.config.variant = m.config.variants[0]
        rendered_metadata = [
            (m, False, False),
        ]
    else:
        # merge any passed-in variants with any files found
        variants = get_package_variants(m, variants=variants)

        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have (i.e. expand top-level variants, not output-only variants)
        rendered_metadata = distribute_variants(
            m,
            variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            allow_no_other_outputs=True,
            bypass_env_check=bypass_env_check)
    if need_cleanup:
        utils.rm_rf(recipe_dir)
    return rendered_metadata
Example #10
def test_use_selectors_in_variants(testing_workdir, testing_config):
    testing_config.variant_config_files = [
        os.path.join(recipe_dir, 'selector_conda_build_config.yaml')
    ]
    variants.get_package_variants(testing_workdir, testing_config)
Example #11
def test_use_selectors_in_variants(testing_workdir, testing_config):
    testing_config.variant_config_files = [os.path.join(recipe_dir,
                                                        'selector_conda_build_config.yaml')]
    variants.get_package_variants(testing_workdir, testing_config)
Example #12
def render_recipe(recipe_path,
                  config,
                  no_download_source=False,
                  variants=None,
                  permit_unsatisfiable_variants=True,
                  reset_build_id=True,
                  bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant. Outputs are not factored in here
    (subpackages won't affect the results returned here).
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source goes into a different
    #    build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build folder.
    #   There's no way around it AFAICT.  We must download the source to be able to render
    #   the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
    #   folder until rendering is complete, because package names can have variant jinja2 in them.
    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    if m.final:
        if not hasattr(m.config, 'variants'):
            m.config.variants = [m.config.variant]
        rendered_metadata = [
            (m, False, False),
        ]
    else:
        index, index_ts = get_build_index(
            m.config.build_subdir,
            bldpkgs_dir=m.config.bldpkgs_dir,
            output_folder=m.config.output_folder,
            channel_urls=m.config.channel_urls,
            omit_defaults=m.config.override_channels,
            debug=m.config.debug,
            verbose=m.config.verbose,
            locking=m.config.locking,
            timeout=m.config.timeout)
        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have.
        variants = (dict_of_lists_to_list_of_dicts(variants)
                    if variants else get_package_variants(m))
        rendered_metadata = distribute_variants(
            m,
            variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            allow_no_other_outputs=True,
            bypass_env_check=bypass_env_check)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
Example #13
def collect_tasks(path,
                  folders,
                  matrix_base_dir,
                  channels=None,
                  steps=0,
                  test=False,
                  max_downstream=5,
                  variant_config_files=None,
                  platform_filters=None,
                  clobber_sections_file=None,
                  append_sections_file=None,
                  pass_throughs=None,
                  skip_existing=True,
                  build_config_vars=None):
    """Return a graph of build tasks."""
    # avoid a shared mutable default argument
    build_config_vars = build_config_vars or {}
    task_graph = nx.DiGraph()
    parsed_cli_args = _parse_python_numpy_from_pass_throughs(pass_throughs)
    config = conda_build.api.Config(
        clobber_sections_file=clobber_sections_file,
        append_sections_file=append_sections_file,
        skip_existing=skip_existing,
        **parsed_cli_args,
    )
    platform_filters = ensure_list(platform_filters) if platform_filters else ['*']
    platforms = parse_platforms(matrix_base_dir, platform_filters,
                                build_config_vars)
    # loop over platforms here because each platform may have different dependencies
    # each platform will be submitted with a different label
    for platform in platforms:
        subdir = f"{platform['platform']}-{platform['arch']}"
        config.variants = get_package_variants(path, config,
                                               platform.get('variants'))
        config.channel_urls = channels or []
        config.variant_config_files = variant_config_files or []
        conda_resolve = Resolve(
            get_build_index(subdir=subdir,
                            bldpkgs_dir=config.bldpkgs_dir,
                            channel_urls=channels)[0])
        # this graph is potentially different for platform and for build or test mode ("run")
        graph = construct_graph(
            path,
            worker=platform,
            folders=folders,
            run="build",
            matrix_base_dir=matrix_base_dir,
            conda_resolve=conda_resolve,
            config=config,
        )
        # Apply the build label to any nodes that need (re)building or testing
        expand_run(
            graph,
            config=config.copy(),
            conda_resolve=conda_resolve,
            worker=platform,
            run="build",
            steps=steps,
            max_downstream=max_downstream,
            recipes_dir=path,
            matrix_base_dir=matrix_base_dir,
        )
        # merge this graph with the main one
        task_graph = nx.compose(task_graph, graph)
    collapse_noarch_python_nodes(task_graph)
    return task_graph
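A hedged usage sketch for collect_tasks; the recipes directory, folder list, and matrix directory below are placeholders.

# len() of a networkx DiGraph is its node count
task_graph = collect_tasks('./recipes', folders=['mypkg'],
                           matrix_base_dir='./matrix-config')
print('%d build tasks collected' % len(task_graph))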
Example #14
def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=None, version=None,
                git_tag=None, cran_url=None, recursive=False, archive=True,
                version_compare=False, update_policy='', r_interp='r-base', use_binaries_ver=None,
                use_noarch_generic=False, use_rtools_win=False, config=None,
                variant_config_files=None):

    output_dir = realpath(output_dir)
    config = get_or_merge_config(config, variant_config_files=variant_config_files)

    if not cran_url:
        with TemporaryDirectory() as t:
            _variant = get_package_variants(t, config)[0]
        cran_url = ensure_list(_variant.get('cran_mirror', DEFAULT_VARIANTS['cran_mirror']))[0]

    if len(in_packages) > 1 and version_compare:
        raise ValueError("--version-compare only works with one package at a time")
    if update_policy == 'error' and not in_packages:
        raise ValueError("At least one package must be supplied")

    package_dicts = {}
    package_list = []

    cran_url = cran_url.rstrip('/')
    cran_metadata = get_cran_metadata(cran_url, output_dir)

    for package in in_packages:
        inputs_dict = package_to_inputs_dict(output_dir, output_suffix, git_tag, package)
        if inputs_dict:
            package_dicts.update({inputs_dict['pkg-name']: {'inputs': inputs_dict}})

    for package_name, package_dict in package_dicts.items():
        package_list.append(package_name)

    while package_list:
        inputs = package_dicts[package_list.pop()]['inputs']
        location = inputs['location']
        pkg_name = inputs['pkg-name']
        is_github_url = location and 'github.com' in location
        is_tarfile = location and isfile(location) and tarfile.is_tarfile(location)
        url = inputs['location']

        dir_path = inputs['new-location']
        print("Making/refreshing recipe for {}".format(pkg_name))

        # Bodges GitHub packages into cran_metadata
        if is_github_url or is_tarfile:
            rm_rf(config.work_dir)
            if is_github_url:
                m = metadata.MetaData.fromdict({'source': {'git_url': location}}, config=config)
                source.git_source(m.get_section('source'), m.config.git_cache, m.config.work_dir)
                new_git_tag = git_tag if git_tag else get_latest_git_tag(config)
                p = subprocess.Popen(['git', 'checkout', new_git_tag], stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE, cwd=config.work_dir)
                stdout, stderr = p.communicate()
                stdout = stdout.decode('utf-8')
                stderr = stderr.decode('utf-8')
                if p.returncode:
                    sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                             (new_git_tag, stderr.strip()))
                if stdout:
                    print(stdout, file=sys.stdout)
                if stderr:
                    print(stderr, file=sys.stderr)
            else:
                m = metadata.MetaData.fromdict({'source': {'url': location}}, config=config)
                source.unpack(m.get_section('source'), m.config.work_dir, m.config.src_cache,
                              output_dir, m.config.work_dir)
            DESCRIPTION = join(config.work_dir, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(config.work_dir, 'pkg', "DESCRIPTION")
                sub_description_name = join(config.work_dir, location.split('/')[-1], "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file in %s, %s)"
                                 % (location, sub_description_pkg, sub_description_name))

            with open(DESCRIPTION) as f:
                description_text = clear_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d
        else:
            package = pkg_name

        if pkg_name not in cran_metadata:
            sys.exit("Package %s not found" % pkg_name)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url and not is_tarfile:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(cran_url, package, session))

        cran_package = cran_metadata[package.lower()]

        package_dicts[package.lower()].update(
            {
                'cran_packagename': package,
                'packagename': 'r-' + package.lower(),
                'patches': '',
                'build_number': 0,
                'build_depends': '',
                'host_depends': '',
                'run_depends': '',
                # CRAN doesn't seem to have this metadata :(
                'home_comment': '#',
                'homeurl': '',
                'summary_comment': '#',
                'summary': '',
            })
        d = package_dicts[package.lower()]
        d['binary1'] = ''
        d['binary2'] = ''

        if version:
            d['version'] = version
            raise NotImplementedError("Package versions from CRAN are not yet implemented")

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        patches = []
        script_env = []
        extra_recipe_maintainers = []
        build_number = 0
        if update_policy.startswith('merge') and inputs['old-metadata']:
            m = inputs['old-metadata']
            patches = make_array(m, 'source/patches')
            script_env = make_array(m, 'build/script_env')
            extra_recipe_maintainers = make_array(m, 'extra/recipe-maintainers', add_maintainer)
            if m.version() == d['conda_version']:
                build_number = int(m.get_value('build/number', 0))
                build_number += 1 if update_policy == 'merge-incr-build-num' else 0
        if add_maintainer:
            new_maintainer = "{indent}{add_maintainer}".format(indent=INDENT,
                                                               add_maintainer=add_maintainer)
            if new_maintainer not in extra_recipe_maintainers:
                if not len(extra_recipe_maintainers):
                    # We hit this case when there is no existing recipe.
                    extra_recipe_maintainers = make_array({}, 'extra/recipe-maintainers', True)
                extra_recipe_maintainers.append(new_maintainer)
        if len(extra_recipe_maintainers):
            # list[1:].sort() would sort a discarded copy; use slice assignment
            # to sort everything after the first entry in place
            extra_recipe_maintainers[1:] = sorted(extra_recipe_maintainers[1:])
            extra_recipe_maintainers.insert(0, "extra:\n  ")
        d['extra_recipe_maintainers'] = ''.join(extra_recipe_maintainers)
        d['patches'] = ''.join(patches)
        d['script_env'] = ''.join(script_env)
        d['build_number'] = build_number

        cached_path = None
        cran_layout = {'source': {'selector': '{others}',
                                  'dir': 'src/contrib/',
                                  'ext': '.tar.gz',
                                  # If we had platform filters we would change this to:
                                  # build_for_linux or is_github_url or is_tarfile
                                  'use_this': True},
                       'win-64': {'selector': 'win64',
                                  'dir': 'bin/windows/contrib/{}/'.format(use_binaries_ver),
                                  'ext': '.zip',
                                  'use_this': True if use_binaries_ver else False},
                       'osx-64': {'selector': 'osx',
                                  'dir': 'bin/macosx/el-capitan/contrib/{}/'.format(
                                      use_binaries_ver),
                                  'ext': '.tgz',
                                  'use_this': True if use_binaries_ver else False}}
        available = {}
        for archive_type, archive_details in iteritems(cran_layout):
            contrib_url = ''
            if archive_details['use_this']:
                if is_tarfile:
                    filename = basename(location)
                    contrib_url = relpath(location, dir_path)
                    contrib_url_rendered = package_url = contrib_url
                    sha256 = hashlib.sha256()
                    cached_path = location
                elif not is_github_url:
                    filename_rendered = '{}_{}{}'.format(
                        package, d['cran_version'], archive_details['ext'])
                    filename = '{}_{{{{ version }}}}'.format(package) + archive_details['ext']
                    contrib_url = '{{{{ cran_mirror }}}}/{}'.format(archive_details['dir'])
                    contrib_url_rendered = cran_url + '/{}'.format(archive_details['dir'])
                    package_url = contrib_url_rendered + filename_rendered
                    sha256 = hashlib.sha256()
                    print("Downloading {} from {}".format(archive_type, package_url))
                    # We may need to inspect the file later to determine which compilers are needed.
                    cached_path, _ = source.download_to_cache(
                        config.src_cache, '',
                        {'url': package_url, 'fn': archive_type + '-' + filename_rendered})
                available_details = {}
                available_details['selector'] = archive_details['selector']
                if cached_path:
                    with open(cached_path, 'rb') as fh:
                        sha256.update(fh.read())
                    available_details['filename'] = filename
                    available_details['contrib_url'] = contrib_url
                    available_details['contrib_url_rendered'] = contrib_url_rendered
                    available_details['cranurl'] = package_url
                    available_details['hash_entry'] = 'sha256: {}'.format(sha256.hexdigest())
                    available_details['cached_path'] = cached_path
                # This is rubbish; d[] should be renamed global[] and should be
                #      merged into source and binaryN.
                if archive_type == 'source':
                    if is_github_url:
                        available_details['url_key'] = ''
                        available_details['fn_key'] = ''
                        available_details['git_url_key'] = 'git_url:'
                        available_details['git_tag_key'] = 'git_tag:'
                        hash_msg = '# You can add a hash for the file here, (md5, sha1 or sha256)'
                        available_details['hash_entry'] = hash_msg
                        available_details['filename'] = ''
                        available_details['cranurl'] = ''
                        available_details['git_url'] = url
                        available_details['git_tag'] = new_git_tag
                        available_details['archive_keys'] = ''
                    else:
                        available_details['url_key'] = 'url:'
                        available_details['fn_key'] = 'fn:'
                        available_details['git_url_key'] = ''
                        available_details['git_tag_key'] = ''
                        available_details['cranurl'] = ' ' + contrib_url + filename
                        available_details['git_url'] = ''
                        available_details['git_tag'] = ''
                available_details['patches'] = d['patches']
                available[archive_type] = available_details

        # Figure out the selectors according to what is available.
        _all = ['linux', 'win32', 'win64', 'osx']
        from_source = _all[:]
        binary_id = 1
        for archive_type, archive_details in iteritems(available):
            if archive_type != 'source':
                sel = archive_details['selector']
                from_source.remove(sel)
                binary_id += 1
            else:
                for k, v in iteritems(archive_details):
                    d[k] = v
        if from_source == _all:
            sel_src = ""
            sel_src_and_win = '  # [win]'
            sel_src_not_win = '  # [not win]'
        else:
            sel_src = '  # [' + ' or '.join(from_source) + ']'
            sel_src_and_win = '  # [' + ' or '.join(fs for fs in from_source if
                                                    fs.startswith('win')) + ']'
            sel_src_not_win = '  # [' + ' or '.join(fs for fs in from_source if not
                                                    fs.startswith('win')) + ']'

        d['sel_src'] = sel_src
        d['sel_src_and_win'] = sel_src_and_win
        d['sel_src_not_win'] = sel_src_not_win

        if 'source' in available:
            available_details = available['source']
            available_details['sel'] = sel_src
            filename = available_details['filename']
            if 'contrib_url' in available_details:
                contrib_url = available_details['contrib_url']
                if archive:
                    if is_tarfile:
                        available_details['cranurl'] = (INDENT + contrib_url)
                    else:
                        available_details['cranurl'] = (INDENT + contrib_url +
                            filename + sel_src + INDENT + contrib_url +
                            'Archive/{}/'.format(package) + filename + sel_src)
                else:
                    available_details['cranurl'] = ' ' + contrib_url + filename + sel_src
            if not is_github_url:
                available_details['archive_keys'] = '{fn_key} {filename} {sel}\n' \
                                                    '  {url_key}{sel}' \
                                                    '    {cranurl}\n' \
                                                    '  {hash_entry}{sel}'.format(
                    **available_details)

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in
            cran_package['orig_lines'] if l])

        # Render the source and binaryN keys
        binary_id = 1
        for archive_type, archive_details in iteritems(available):
            if archive_type == 'source':
                d['source'] = SOURCE_META.format(**archive_details)
            else:
                archive_details['sel'] = '  # [' + archive_details['selector'] + ']'
                d['binary' + str(binary_id)] = BINARY_META.format(**archive_details)
                binary_id += 1

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        d['license_family'] = guess_license_family(d['license'], allowed_license_families)

        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use') == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])
        else:
            # use CRAN page as homepage if nothing has been specified
            d['home_comment'] = ''
            if is_github_url:
                d['homeurl'] = ' {}'.format(location)
            else:
                d['homeurl'] = ' https://CRAN.R-project.org/package={}'.format(package)

        if not use_noarch_generic or cran_package.get("NeedsCompilation", 'no') == 'yes':
            d['noarch_generic'] = ''
        else:
            d['noarch_generic'] = 'noarch: generic'

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [s.strip() for s in cran_package.get('Depends', '').split(',')
                   if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports', '').split(',')
                   if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", '').split(',')
                 if s.strip()]

        dep_dict = {}

        seen = set()
        for s in list(chain(imports, depends, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" %
                    (package, s))
            name = match.group('name')
            if name in seen:
                continue
            seen.add(name)
            archs = match.group('archs')
            relop = match.group('relop') or ''
            ver = match.group('version') or ''
            ver = ver.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or ver

            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                "package %s: %s" % (package, s))

            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=ver)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        need_git = is_github_url
        if cran_package.get("NeedsCompilation", 'no') == 'yes':
            with tarfile.open(available['source']['cached_path']) as tf:
                need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77')) for f in tf])
                # Fortran builds use CC to perform the link (they do not call the linker directly).
                need_c = True if need_f else \
                    any([f.name.lower().endswith('.c') for f in tf])
                need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++'))
                                         for f in tf])
                need_autotools = any([f.name.lower().endswith('/configure') for f in tf])
                need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \
                    any([f.name.lower().endswith(('/makefile', '/makevars'))
                        for f in tf])
        else:
            need_c = need_cxx = need_f = need_autotools = need_make = False

        if 'Rcpp' in dep_dict or 'RcppArmadillo' in dep_dict:
            need_cxx = True

        if need_cxx:
            need_c = True

        for dep_type in ['build', 'host', 'run']:

            deps = []
            # Put non-R dependencies first.
            if dep_type == 'build':
                if need_c:
                    deps.append("{indent}{{{{ compiler('c') }}}}      {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if need_cxx:
                    deps.append("{indent}{{{{ compiler('cxx') }}}}    {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if need_f:
                    deps.append("{indent}{{{{ compiler('fortran') }}}}{sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if use_rtools_win:
                    need_c = need_cxx = need_f = need_autotools = need_make = False
                    deps.append("{indent}{{{{native}}}}rtools      {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{native}}}}extsoft     {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}toolchain      {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_autotools or need_make or need_git:
                    deps.append("{indent}{{{{posix}}}}filesystem      {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_git:
                    deps.append("{indent}{{{{posix}}}}git".format(indent=INDENT))
                if need_autotools:
                    deps.append("{indent}{{{{posix}}}}sed             {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}grep            {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}autoconf        {sel}".format(
                        indent=INDENT, sel=sel_src))
                    deps.append("{indent}{{{{posix}}}}automake-wrapper{sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}automake        {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}pkg-config".format(indent=INDENT))
                if need_make:
                    deps.append("{indent}{{{{posix}}}}make            {sel}".format(
                        indent=INDENT, sel=sel_src))
            elif dep_type == 'run':
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}gcc-libs       {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))

            if dep_type == 'host' or dep_type == 'run':
                for name in sorted(dep_dict):
                    if name in R_BASE_PACKAGE_NAMES:
                        continue
                    if name == 'R':
                        # Put R first
                        # Regardless of build or run, and whether this is a
                        # recommended package or not, it can only depend on
                        # r_interp since anything else can and will cause
                        # cycles in the dependency graph. The cran metadata
                        # lists all dependencies anyway, even those packages
                        # that are in the recommended group.
                        # We don't include any R version restrictions because
                        # conda-build always pins r-base and mro-base version.
                        deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_interp))
                    else:
                        conda_name = 'r-' + name.lower()

                        if dep_dict[name]:
                            deps.append('{indent}{name} {version}'.format(name=conda_name,
                                version=dep_dict[name], indent=INDENT))
                        else:
                            deps.append('{indent}{name}'.format(name=conda_name,
                                indent=INDENT))
                        if recursive:
                            lower_name = name.lower()
                            if lower_name not in package_dicts:
                                inputs_dict = package_to_inputs_dict(output_dir, output_suffix,
                                                                     git_tag, lower_name)
                                assert lower_name == inputs_dict['pkg-name'], \
                                    "name %s != inputs_dict['pkg-name'] %s" % (
                                        name, inputs_dict['pkg-name'])
                                assert lower_name not in package_list
                                package_dicts.update({lower_name: {'inputs': inputs_dict}})
                                package_list.append(lower_name)

            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        dir_path = d['inputs']['new-location']
        if exists(dir_path) and not version_compare:
            if update_policy == 'error':
                raise RuntimeError("directory already exists "
                                   "(and --update-policy is 'error'): %s" % dir_path)
            elif update_policy == 'overwrite':
                rm_rf(dir_path)
        elif update_policy == 'skip-up-to-date' and up_to_date(cran_metadata,
                                                               d['inputs']['old-metadata']):
            continue
        elif update_policy == 'skip-existing' and d['inputs']['old-metadata']:
            continue

        # Normalize the metadata values
        d = {k: unicodedata.normalize("NFKD", text_type(v)).encode('ascii', 'ignore')
             .decode() for k, v in iteritems(d)}
        try:
            makedirs(dir_path)
        except OSError:
            pass
        print("Writing recipe for %s" % package.lower())
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(clear_whitespace(CRAN_META.format(**d)))
        if not exists(join(dir_path, 'build.sh')) or update_policy == 'overwrite':
            with open(join(dir_path, 'build.sh'), 'w') as f:
                if from_source == _all:
                    f.write(CRAN_BUILD_SH_SOURCE.format(**d))
                elif from_source == []:
                    f.write(CRAN_BUILD_SH_BINARY.format(**d))
                else:
                    tpbt = [target_platform_bash_test_by_sel[t] for t in from_source]
                    d['source_pf_bash'] = ' || '.join(['[[ $target_platform ' + s + ' ]]'
                                                       for s in tpbt])
                    f.write(CRAN_BUILD_SH_MIXED.format(**d))

        if not exists(join(dir_path, 'bld.bat')) or update_policy == 'overwrite':
            with open(join(dir_path, 'bld.bat'), 'w') as f:
                if len([fs for fs in from_source if fs.startswith('win')]) == 2:
                    f.write(CRAN_BLD_BAT_SOURCE.format(**d))
                else:
                    f.write(CRAN_BLD_BAT_MIXED.format(**d))
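A small worked illustration of the selector arithmetic above: if win-64 and osx binaries are used, only linux and win32 remain in from_source, so the source selector narrows accordingly.

_all = ['linux', 'win32', 'win64', 'osx']
from_source = ['linux', 'win32']  # win64/osx dropped by the binary branches
sel_src = '  # [' + ' or '.join(from_source) + ']'
assert sel_src == '  # [linux or win32]'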
Example #15
def render_recipe(recipe_path,
                  config,
                  no_download_source=False,
                  variants=None,
                  permit_unsatisfiable_variants=True,
                  reset_build_id=True):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant. Outputs are not factored in here
    (subpackages won't affect the results returned here).
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    rendered_metadata = {}

    if m.final:
        rendered_metadata = [
            (m, False, False),
        ]

    else:
        index, index_ts = get_build_index(m.config, m.config.build_subdir)
        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have.
        variants = (dict_of_lists_to_list_of_dicts(variants)
                    if variants else get_package_variants(m))
        rendered_metadata = distribute_variants(
            m,
            variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            stub_subpackages=True)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata
Example #16
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True, bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant. Outputs are not factored in here
    (subpackages won't affect the results returned here).
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source goes into a different
    #    build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build folder.
    #   There's no way around it AFAICT.  We must download the source to be able to render
    #   the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
    #   folder until rendering is complete, because package names can have variant jinja2 in them.
    if m.needs_source_for_render and not m.source_provided:
        try_download(m, no_download_source=no_download_source)

    if m.final:
        if not hasattr(m.config, 'variants') or not m.config.variant:
            m.config.ignore_system_variants = True
            if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')):
                m.config.variant_config_files = [os.path.join(m.path, 'conda_build_config.yaml')]
            m.config.variants = get_package_variants(m, variants=variants)
            m.config.variant = m.config.variants[0]
        rendered_metadata = [(m, False, False), ]
    else:
        # merge any passed-in variants with any files found
        variants = get_package_variants(m, variants=variants)

        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have (i.e. expand top-level variants, not output-only variants)
        rendered_metadata = distribute_variants(m, variants,
                                    permit_unsatisfiable_variants=permit_unsatisfiable_variants,
                                    allow_no_other_outputs=True, bypass_env_check=bypass_env_check)
    if need_cleanup:
        utils.rm_rf(recipe_dir)
    return rendered_metadata
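A minimal sketch of driving this final version; the recipe path is a placeholder, and get_or_merge_config / render_recipe are assumed importable as in the examples above.

config = get_or_merge_config(None)
metadata_tuples = render_recipe('./my-recipe', config=config,
                                bypass_env_check=True)
for m, _, _ in metadata_tuples:
    print(m.name())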