Example #1
    def __call__(self, args):
        # check some error conditions
        if args.recipe_directory and not os.path.isdir(args.recipe_directory):
            raise IOError("The source recipe directory should be the directory of the "
                          "conda-recipe you want to build a feedstock for. Got {}".format(
                args.recipe_directory))

        # Get some information about the source recipe.
        if args.recipe_directory:
            meta = MetaData(args.recipe_directory)
        else:
            meta = None

        feedstock_directory = args.feedstock_directory.format(
            package=argparse.Namespace(name=meta.name()))

        msg = 'Initial commit of the {} feedstock.'.format(meta.name())

        try:
            generate_feedstock_content(feedstock_directory, args.recipe_directory, meta)
            if not args.no_git_repo:
                create_git_repo(feedstock_directory, msg)

            print("\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
                  "and afterwards call 'conda smithy register-github'")
        except RuntimeError as e:
            print(e)
Example #2
def read_recipe_name_version_build(meta_yaml_path):
    """
    Read the given metadata file and return (package_name, version, build_number)
    
    meta_yaml_path: May be a path to a meta.yaml file or its parent recipe directory.
    """
    # Provide these default values, otherwise conda-build will
    # choke on jinja templates that reference them.
    # This will be fixed when they finally merge conda-build PR#662 and PR#666
    if "CONDA_NPY" not in os.environ:
        os.environ["CONDA_NPY"] = '19'
    if "CONDA_PY" not in os.environ:
        os.environ["CONDA_PY"] = '27'
    os.environ["GIT_FULL_HASH"] = "9999999"

    if os.path.isdir(meta_yaml_path):
        recipe_dir = meta_yaml_path
    else:
        recipe_dir = os.path.split(meta_yaml_path)[0]

    try:
        metadata = MetaData(recipe_dir)
        return (metadata.name(), metadata.version(), metadata.build_number())
    except SystemExit as ex:
        raise Exception(*ex.args)
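# A minimal usage sketch (the recipe path below is hypothetical); the helper
# accepts either a meta.yaml path or its parent recipe directory.
name, version, build_number = read_recipe_name_version_build("recipes/mypkg")
print(name, version, build_number)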
Example #3
def built_package_path(recipe, env=None):
    """
    Returns the path to which a recipe would be built.

    Does not necessarily exist; equivalent to `conda build --output recipename`
    but without the subprocess.
    """
    if env is None:
        env = {}
    env = dict(env)

    # Ensure CONDA_PY is an integer (needed by conda-build 2.0.4)
    py = env.get('CONDA_PY', None)
    if py is not None:
        env['CONDA_PY'] = _string_or_float_to_integer_python(py)

    with temp_env(env):
        # Disabling set_build_id prevents the creation of uniquely-named work
        # directories just for checking the output file.
        # It needs to be done within the context manager so that it sees the
        # os.environ.
        config = api.Config(
            no_download_source=True,
            set_build_id=False)
        meta = MetaData(recipe, config=config)
        meta.parse_again()
        path = api.get_output_file_path(meta, config=config)
    return path
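# Hedged usage sketch; the recipe path and CONDA_PY value are hypothetical.
# No package is written; this only computes where the build would land,
# mirroring `conda build --output`.
path = built_package_path("recipes/mypkg", env={"CONDA_PY": "3.5"})
print(path)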
Example #4
    def __call__(self, args):
        # check some error conditions
        if args.recipe_directory and not os.path.isdir(args.recipe_directory):
            raise IOError(
                "The source recipe directory should be the directory of the "
                "conda-recipe you want to build a feedstock for. Got {}".
                format(args.recipe_directory))

        # Get some information about the source recipe.
        if args.recipe_directory:
            meta = MetaData(args.recipe_directory)
        else:
            meta = None

        feedstock_directory = args.feedstock_directory.format(
            package=argparse.Namespace(name=meta.name()))
        msg = 'Initial commit of the {} feedstock.'.format(meta.name())

        try:
            generate_feedstock_content(feedstock_directory,
                                       args.recipe_directory, meta)
            if not args.no_git_repo:
                create_git_repo(feedstock_directory, msg)

            print(
                "\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
                "and afterwards call 'conda smithy register-github'")
        except RuntimeError as e:
            print(e)
Example #5
def collapse_subpackage_nodes(graph):
    """Collapse all subpackage nodes into their parent recipe node

    We get one node per output, but a given recipe can have multiple outputs.  It's important
    for dependency ordering in the graph that the outputs exist independently, but once those
    dependencies are established, we need to collapse subpackages down to a single job for the
    top-level recipe."""
    # group nodes by their recipe path first, then within those groups by their variant
    node_groups = {}
    for node in graph.nodes():
        if 'meta' in graph.node[node]:
            meta = graph.node[node]['meta']
            meta_path = meta.meta_path or meta.meta['extra']['parent_recipe']['path']
            master = False

            master_meta = MetaData(meta_path, config=meta.config)
            if master_meta.name() == meta.name():
                master = True
            group = node_groups.get(meta_path, {})
            subgroup = group.get(HashableDict(meta.config.variant), {})
            if master:
                if 'master' in subgroup:
                    raise ValueError("tried to set more than one node in a group as master")
                subgroup['master'] = node
            else:
                sps = subgroup.get('subpackages', [])
                sps.append(node)
                subgroup['subpackages'] = sps
            group[HashableDict(meta.config.variant)] = subgroup
            node_groups[meta_path] = group

    for recipe_path, group in node_groups.items():
        for variant, subgroup in group.items():
            # if no node is the top-level recipe (only outputs, no top-level output), need to obtain
            #     package/name from recipe given by common recipe path.
            subpackages = subgroup.get('subpackages')
            if 'master' not in subgroup:
                sp0 = graph.node[subpackages[0]]
                master_meta = MetaData(recipe_path, config=sp0['meta'].config)
                worker = sp0['worker']
                master_key = package_key(master_meta, worker['label'])
                graph.add_node(master_key, meta=master_meta, worker=worker)
                master = graph.node[master_key]
            else:
                master = subgroup['master']
                master_key = package_key(graph.node[master]['meta'],
                                         graph.node[master]['worker']['label'])
            # fold in dependencies for all of the other subpackages within a group.  This is just
            #     the intersection of the edges between all nodes.  Store this on the "master" node.
            if subpackages:
                remap_edges = [edge for edge in graph.edges() if edge[1] in subpackages]
                for edge in remap_edges:
                    # make sure not to add references to yourself
                    if edge[0] != master_key:
                        graph.add_edge(edge[0], master_key)
                    graph.remove_edge(*edge)

                # remove nodes that have been folded into master nodes
                for subnode in subpackages:
                    graph.remove_node(subnode)
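# The edge remapping at the end of collapse_subpackage_nodes can be seen in
# isolation with a toy graph: edges pointing at subpackage nodes are
# redirected to the master node, then the subpackage nodes are dropped.
# (Node names here are hypothetical.)
import networkx as nx

g = nx.DiGraph()
g.add_edges_from([('dep1', 'sub_a'), ('dep2', 'sub_b')])
g.add_node('parent')
master_key = 'parent'
subpackages = ['sub_a', 'sub_b']

remap_edges = [edge for edge in list(g.edges()) if edge[1] in subpackages]
for edge in remap_edges:
    # make sure not to add references to yourself
    if edge[0] != master_key:
        g.add_edge(edge[0], master_key)
    g.remove_edge(*edge)
for subnode in subpackages:
    g.remove_node(subnode)

print(sorted(g.edges()))  # [('dep1', 'parent'), ('dep2', 'parent')]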
Example #6
def get_tests(path):
    "Extract tests from a built package"
    tmp = tempfile.mkdtemp()
    t = tarfile.open(path)
    t.extractall(tmp)
    input_dir = os.path.join(tmp, 'info', 'recipe')

    tests = []
    recipe_meta = MetaData(input_dir)

    tests_commands = recipe_meta.get_value('test/commands')
    tests_imports = recipe_meta.get_value('test/imports')
    requirements = recipe_meta.get_value('requirements/run')

    if tests_imports or tests_commands:
        if tests_commands:
            tests.append(' && '.join(tests_commands))
        if tests_imports and 'python' in requirements:
            tests.append(' && '.join('python -c "import %s"' % imp
                                     for imp in tests_imports))
        elif tests_imports and ('perl' in requirements
                                or 'perl-threaded' in requirements):
            tests.append(' && '.join('''perl -e "use %s;"''' % imp
                                     for imp in tests_imports))

    tests = ' && '.join(tests)
    tests = tests.replace('$R ', 'Rscript ')
    return tests
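# Usage sketch; the package path is hypothetical. The result is a single
# shell command string joining the recipe's test commands with ' && '.
test_cmd = get_tests("mypkg-1.0-0.tar.bz2")
print(test_cmd)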
Example #8
    def __call__(self, args):
        # check some error conditions
        if args.recipe_directory and not os.path.isdir(args.recipe_directory):
            raise IOError(
                "The source recipe directory should be the directory of the "
                "conda-recipe you want to build a feedstock for. Got {}".
                format(args.recipe_directory))

        # Get some information about the source recipe.
        if args.recipe_directory:
            meta = MetaData(args.recipe_directory)
        else:
            meta = None

        feedstock_directory = args.feedstock_directory.format(
            package=argparse.Namespace(name=meta.name()))
        msg = "Initial feedstock commit with conda-smithy {}.".format(
            __version__)

        os.makedirs(feedstock_directory)
        subprocess.check_call(["git", "init"], cwd=feedstock_directory)
        generate_feedstock_content(feedstock_directory, args.recipe_directory)
        subprocess.check_call(["git", "commit", "-m", msg],
                              cwd=feedstock_directory)

        print(
            "\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
            "and afterwards call 'conda smithy register-github'")
Example #9
def version_compare(package, versions):
    if not versions:
        # PyPI is case sensitive, this will pass control
        # to a method in main() to take care of that.
        return

    nv = normalized_version

    norm_versions = [nv(ver) for ver in versions]

    recipe_dir = abspath(package.lower())
    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)
    m = MetaData(recipe_dir)
    local_version = nv(m.version())
    print("Local recipe for %s has version %s" % (package, local_version))
    if local_version not in versions:
        sys.exit("Error: %s %s is not available on PyPI."
                 % (package, local_version))
    else:
        # Comparing normalized versions, displaying non normalized ones
        new_versions = versions[:norm_versions.index(local_version)]
        if len(new_versions) > 0:
            print("Following new versions of %s are avaliable" % (package))
            for ver in new_versions:
                print(ver)
        else:
            print("No new version for %s is available" % (package))
        sys.exit()
Example #11
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile, join

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        for arg in args.recipe:
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                build.build(m)
                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Example #12
def buildDAG(args, modified_files, formula_dir):
    oDag = dag.DAG()
    dMap = {}
    common_names = set([])
    for directory, d_list, f_list in os.walk(formula_dir):
        if 'meta.yaml' in f_list:
            meta = MetaData(os.path.join(directory, 'meta.yaml'))
            reqs = meta.meta['requirements']
            combined_deps = set(reqs.get('build', '')).union(reqs.get('run', ''))
            common_names.add(meta.name())
            dMap[meta.meta_path] = (meta.name(), combined_deps, meta)

    # Populate DAG
    for meta_path in dMap.keys():
        oDag.add_node(meta_path)

    # Create edges
    for ind_node, name, dependencies, meta, dag_node in _walkMapAndDag(dMap, oDag):
        controlled_dependencies = set(dependencies).intersection(common_names)
        if dMap[dag_node][0] in controlled_dependencies:
            oDag.add_edge(dag_node, ind_node)

    # Remove edges (skips, unmodified recipes, etc)
    for ind_node, name, dependencies, meta, dag_node in _walkMapAndDag(dMap, oDag):
        controlled_dependencies = set(dependencies).intersection(common_names)
        if ind_node not in modified_files and controlled_dependencies and args.dependencies:
            continue
        elif ind_node not in modified_files:
            oDag.delete_node_if_exists(ind_node)

    return oDag
Example #13
def get_tests(path):
    "Extract tests from a built package"
    tmp = tempfile.mkdtemp()
    t = tarfile.open(path)
    t.extractall(tmp)
    input_dir = os.path.join(tmp, 'info', 'recipe')

    tests = [
        '/usr/local/env-execute true',
        '. /usr/local/env-activate.sh',
    ]
    recipe_meta = MetaData(input_dir)

    tests_commands = recipe_meta.get_value('test/commands')
    tests_imports = recipe_meta.get_value('test/imports')
    requirements = recipe_meta.get_value('requirements/run')

    if tests_imports or tests_commands:
        if tests_commands:
            tests.append(' && '.join(tests_commands))
        if tests_imports and 'python' in requirements:
            tests.append(' && '.join('python -c "import %s"' % imp
                                     for imp in tests_imports))
        elif tests_imports and ('perl' in requirements
                                or 'perl-threaded' in requirements):
            tests.append(' && '.join('''perl -e "use %s;"''' % imp
                                     for imp in tests_imports))

    tests = ' && '.join(tests)
    tests = tests.replace('$R ', 'Rscript ')
    # this is specific to involucro, the way how we build our containers
    tests = tests.replace('$PREFIX', '/usr/local')
    tests = tests.replace('${PREFIX}', '/usr/local')

    return f"bash -c {shlex.quote(tests)}"
Example #14
def render_recipe(recipe_path, config, no_download_source=False, variants=None,
                  permit_unsatisfiable_variants=True, reset_build_id=True, expand_output=False):
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    rendered_metadata = {}

    if m.final:
        rendered_metadata = [(m, False, False), ]
        index = None
    else:
        variants = (dict_of_lists_to_list_of_dicts(variants, m.config.platform)
                    if variants else get_package_variants(m, m.config))
        index = get_build_index(m.config, m.config.build_subdir)
        rendered_metadata = distribute_variants(m, variants, index,
                                        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        if not rendered_metadata:
            raise ValueError("No variants were satisfiable - no valid recipes could be rendered.")

    if expand_output:
        rendered_metadata = expand_outputs(rendered_metadata, index)

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata, index
Example #15
def main():
    token = os.environ.get('BINSTAR_TOKEN')

    description = ('Upload or check consistency of a built version of a '
                   'conda recipe with binstar. Note: The existence of the '
                   'BINSTAR_TOKEN environment variable determines '
                   'whether the upload should actually take place.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('recipe_dir', help='the conda recipe directory')
    parser.add_argument('owner', help='the binstar owner/user')
    parser.add_argument('--channel', help='the binstar channel', default='main')
    args = parser.parse_args()
    recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

    cli = get_server_api(token=token)
    meta_main = MetaData(recipe_dir)
    for _, meta in meta_main.get_output_metadata_set(files=None):
        print("Processing {}".format(meta.name()))
        if meta.skip():
            print("No upload to take place - this configuration was skipped in build/skip.")
            continue
        exists = built_distribution_already_exists(cli, meta, owner)
        if token:
            if not exists:
                upload(cli, meta, owner, channel)
                print('Uploaded {}'.format(bldpkg_path(meta)))
            else:
                print('Distribution {} already \nexists for {}.'
                      ''.format(bldpkg_path(meta), owner))
        else:
            print("No BINSTAR_TOKEN present, so no upload is taking place. "
                  "The distribution just built {} already available for {}."
                  "".format('is' if exists else 'is not', owner))
Example #17
    def __call__(self, args):
        # check some error conditions
        if args.recipe_directory and not os.path.isdir(args.recipe_directory):
            raise IOError(
                "The source recipe directory should be the directory of the "
                "conda-recipe you want to build a feedstock for. Got {}".format(
                    args.recipe_directory
                )
            )

        # Get some information about the source recipe.
        if args.recipe_directory:
            meta = MetaData(args.recipe_directory)
        else:
            meta = None

        feedstock_directory = args.feedstock_directory.format(
            package=argparse.Namespace(name=meta.name())
        )
        msg = "Initial feedstock commit with conda-smithy {}.".format(
            __version__
        )

        os.makedirs(feedstock_directory)
        subprocess.check_call(["git", "init"], cwd=feedstock_directory)
        generate_feedstock_content(feedstock_directory, args.recipe_directory)
        subprocess.check_call(
            ["git", "commit", "-m", msg], cwd=feedstock_directory
        )

        print(
            "\nRepository created, please edit conda-forge.yml to configure the upload channels\n"
            "and afterwards call 'conda smithy register-github'"
        )
Example #18
def setup(*args):
    """
    Go through every folder in the `bioconda-recipes/recipes` dir
    and generate a README.rst file.
    """
    print('Generating package READMEs...')
    summaries = []
    for folder in os.listdir(RECIPE_DIR):
        # Subfolders correspond to different versions
        versions = []
        for sf in os.listdir(op.join(RECIPE_DIR, folder)):
            if not op.isdir(op.join(RECIPE_DIR, folder, sf)):
                # Not a folder
                continue
            try:
                LooseVersion(sf)
            except ValueError:
                print("'{}' does not look like a proper version!".format(sf))
                continue
            versions.append(sf)
        versions.sort(key=LooseVersion, reverse=True)
        # Read the meta.yaml file
        try:
            metadata = MetaData(op.join(RECIPE_DIR, folder))
            if metadata.version() not in versions:
                versions.insert(0, metadata.version())
        except SystemExit:
            if versions:
                metadata = MetaData(op.join(RECIPE_DIR, folder, versions[0]))
            else:
                # ignore non-recipe folders
                continue

        # Format the README
        notes = metadata.get_section('extra').get('notes', '')
        if notes:
            notes = 'Notes\n-----\n\n' + notes
        summary = metadata.get_section('about').get('summary', '')
        summaries.append(summary)
        template_options = {
            'title': metadata.name(),
            'title_underline': '=' * len(metadata.name()),
            'summary': summary,
            'home': metadata.get_section('about').get('home', ''),
            'versions': ', '.join(versions),
            'license': metadata.get_section('about').get('license', ''),
            'recipe': ('https://github.com/bioconda/bioconda-recipes/tree/master/recipes/' +
                op.dirname(op.relpath(metadata.meta_path, RECIPE_DIR))),
            'notes': notes
        }
        readme = README_TEMPLATE.format(**template_options)
        # Write to file
        try:
            os.makedirs(op.join(OUTPUT_DIR, folder))  # exist_ok=True on Python 3
        except OSError:
            pass
        output_file = op.join(OUTPUT_DIR, folder, 'README.rst')
        with open(output_file, 'wb') as ofh:
            ofh.write(readme.encode('utf-8'))
Example #19
def main():
    metadata = MetaData(os.environ["RECIPE_DIR"])
    build_id = os.getcwd().split(os.path.sep)[-3]
    print "build_id:", build_id
    for name, section in metadata.get_section("extra").items():
        source.provide(
            Source(section),
            config.Config(build_id=build_id))
Example #20
def test_meta_sorting_version_strip():
    m1 = MetaData.fromdict({'package':
                                {'name': 'a'},
                            'requirements':
                                {'build': ['b > 1.2']}})
    m2 = MetaData.fromdict({'package':
                                {'name': 'b'}})
    metas = sort_dependency_order([m1, m2])
    assert_equal([meta.name() for meta in metas], ['b', 'a'])
Example #21
def meta_of_feedstock(forge_dir, config=None):
    recipe_dir = 'recipe'
    meta_dir = os.path.join(forge_dir, recipe_dir)
    if not os.path.exists(meta_dir):
        raise IOError("The given directory isn't a feedstock.")
    if hasattr(conda_build, 'api'):
        meta = MetaData(meta_dir, config=config)
    else:
        meta = MetaData(meta_dir)
    return meta
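# Usage sketch; "my-feedstock" is a hypothetical feedstock checkout whose
# recipe lives in the conventional recipe/ subdirectory.
meta = meta_of_feedstock("my-feedstock")
print(meta.name(), meta.version())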
Example #22
 def __call__(self, args):
     if not os.path.isdir(args.recipe_directory):
         raise IOError(
             "The recipe directory should be the directory of the conda-recipe. Got {}"
             .format(args.recipe_directory))
     meta = MetaData(args.recipe_directory)
     feedstock_directory = args.feedstock_directory.format(
         package=argparse.Namespace(name=meta.name()))
     generate_feedstock_content(feedstock_directory, args.recipe_directory)
     if not args.no_git_repo:
         create_git_repo(feedstock_directory, meta)
Example #23
    def tobuild(recipe, env):
        pkg = os.path.basename(built_package_path(recipe, env))

        in_channels = [
            channel for channel, pkgs in channel_packages.items()
            if pkg in pkgs
        ]
        if in_channels and not force:
            logger.debug(
                'FILTER: not building %s because '
                'it is in channel(s) and it is not forced: %s', pkg,
                in_channels)
            return False

        # with temp_env, MetaData will see everything in env added to
        # os.environ.
        with temp_env(env):

            # with temp_os, we can fool the MetaData if needed.
            platform = os.environ.get('TRAVIS_OS_NAME', sys.platform)

            # TRAVIS_OS_NAME uses 'osx', but sys.platform uses 'darwin', and
            # that's what conda will be looking for.
            if platform == 'osx':
                platform = 'darwin'

            with temp_os(platform):
                meta = MetaData(recipe)
                if meta.skip():
                    logger.debug(
                        'FILTER: not building %s because '
                        'it defines skip for this env', pkg)
                    return False

                # If on travis, handle noarch.
                if os.environ.get('TRAVIS', None) == 'true':
                    if meta.get_value('build/noarch'):
                        if platform != 'linux':
                            logger.debug('FILTER: only building %s on '
                                         'linux because it defines noarch.',
                                         pkg)
                            return False

        assert not pkg.endswith("_.tar.bz2"), (
            "rendered path {} does not "
            "contain a build number and recipe does not "
            "define skip for this environment. "
            "This is a conda bug.".format(pkg))

        logger.debug(
            'FILTER: building %s because it is not in channels and '
            'does not define skip', pkg)
        return True
Example #24
def load_meta(recipe, env):
    """
    Load metadata for a specific environment.
    """
    with temp_env(env):
        # Disabling set_build_id prevents the creation of uniquely-named work
        # directories just for checking the output file.
        # It needs to be done within the context manager so that it sees the
        # os.environ.
        config = api.Config(no_download_source=True, set_build_id=False)
        meta = MetaData(recipe, config=config)
        meta.parse_again()
        return meta.meta
Example #25
def main():
    recipe_dir = os.environ["RECIPE_DIR"]
    src_dir = os.environ["SRC_DIR"]
    main_work_dir = source.WORK_DIR

    metadata = MetaData(recipe_dir)
    extra_sources_sections = metadata.get_section('extra')['sources']

    for name, source_section in extra_sources_sections.items():
        # Override the location to clone into
        source.WORK_DIR = main_work_dir + '/' + name
        os.makedirs(source.WORK_DIR)

        # Download source
        source.provide(recipe_dir, source_section)
Example #26
def get_deps(recipe, build=True):
    """
    Generator of dependencies for a single recipe, which can be specified as
    a path or as a parsed MetaData.

    Only names (not versions) of dependencies are yielded. Use `build=True` to
    yield build dependencies, otherwise yield run dependencies.
    """
    if isinstance(recipe, str):
        metadata = MetaData(recipe)
    else:
        metadata = recipe
    for dep in metadata.get_value(
            "requirements/{}".format("build" if build else "run"), []):
        yield dep.split()[0]
Example #28
def test_add_intradependencies():
    a_meta = MetaData.fromdict({'package': {'name': 'a', 'version': '1.0'}})
    b_meta = MetaData.fromdict({
        'package': {
            'name': 'b',
            'version': '1.0'
        },
        'requirements': {
            'build': ['a']
        }
    })
    g = nx.DiGraph()
    g.add_node('a', meta=a_meta)
    g.add_node('b', meta=b_meta)
    compute_build_graph.add_intradependencies(g)
    assert ('b', 'a') in g.edges()
Example #29
def create_metapackage(name,
                       version,
                       entry_points=(),
                       build_string=None,
                       build_number=0,
                       dependencies=(),
                       home=None,
                       license_name=None,
                       summary=None,
                       config=None):
    # local import to avoid circular import; we provide create_metapackage in api
    from conda_build.api import build

    if not config:
        config = Config()

    d = defaultdict(dict)
    d['package']['name'] = name
    d['package']['version'] = version
    d['build']['number'] = build_number
    d['build']['entry_points'] = entry_points
    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = build_string
    d['requirements']['run'] = dependencies
    d['about']['home'] = home
    d['about']['license'] = license_name
    d['about']['summary'] = summary
    d = dict(d)
    m = MetaData.fromdict(d, config=config)
    config.compute_build_id(m.name(), m.version())

    return build(m, config=config, need_source_download=False)
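# Hedged usage sketch; the metapackage name, version, and dependency pins
# are hypothetical.
create_metapackage(
    name="mymeta",
    version="1.0",
    dependencies=("python >=3.6", "numpy"),
    summary="empty package that pulls in the run dependencies above",
)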
Example #30
def list_metas(directory, max_depth=0, config=None):
    """
    Get the build metadata of all recipes in a directory.

    The order of metas from this function is not guaranteed.

    Parameters
    ----------
    directory
        Where to start looking for metas using os.walk.
    max_depth : int
        How deep to recurse when looking for recipes.
        A value ``<=0`` will recurse indefinitely. A value of 1
        will look in the given directory for a meta.yaml.
        (default: 0)

    """
    packages = []
    current_depth = max_depth
    root = os.path.normpath(directory)
    for new_root, dirs, files in os.walk(root, followlinks=True):
        depth = new_root[len(root):].count(os.path.sep) + 1
        if max_depth > 0 and depth >= max_depth:
            del dirs[:]

        if 'meta.yaml' in files:
            if hasattr(conda_build, 'api'):
                packages.append(
                    conda_build.api.render(new_root, config=config)[0])
            else:
                packages.append(MetaData(new_root))

    return packages
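# Usage sketch; "recipes" is a hypothetical directory tree. max_depth=2
# searches the directory itself plus its immediate subdirectories.
metas = list_metas("recipes", max_depth=2)
print(len(metas), "recipes found")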
Example #31
def build_package(package, version=None):
    if conda_package_exists(package):
        return 0
    if ' ' in package:
        package, version = package.split(' ')
    try:
        directory = build_recipe(package, version=version)
        dependencies = convert_recipe(directory, package)
    except RuntimeError:
        directory, dependencies = make_recipe(package, version)

    try:
        print("package = %s" % package)
        print("   dependencies = %s" % dependencies)
        # Dependencies will be either package_name or
        #  package_name version_number
        # Only == dependency specs get version numbers
        # All else are just handled without a version spec
        for depend in dependencies:
            build_package(depend)
        args = build_template.format(directory).split()
        print("Building conda package for {0}".format(package.lower()))
        result = subprocess.Popen(args).wait()
        if result == 0 and binstar_upload:
            m = MetaData(directory)
            handle_binstar_upload(build.bldpkg_path(m))
    finally:
        rm_rf(directory)
    return result
Example #32
 def write_meta(self, recipe_dir_name, spec):
     recipe_dir = os.path.join(self.recipes_root_dir, recipe_dir_name)
     if not os.path.exists(recipe_dir):
         os.makedirs(recipe_dir)
     with open(os.path.join(recipe_dir, 'meta.yaml'), 'w') as fh:
         fh.write(textwrap.dedent(spec))
     return MetaData(recipe_dir)
Example #33
def get_deps(recipe, build=True):
    """
    Generator of dependencies for a single recipe, which can be specified as
    a path or as a parsed MetaData.

    Only names (not versions) of dependencies are yielded. Use `build=True` to
    yield build dependencies, otherwise yield run dependencies.
    """
    if isinstance(recipe, str):
        metadata = MetaData(recipe)
    else:
        metadata = recipe
    for dep in metadata.get_value(
        "requirements/{}".format("build" if build else "run"), []
    ):
        yield dep.split()[0]
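# Usage sketch; the recipe path is hypothetical.
build_deps = list(get_deps("recipes/mypkg", build=True))
run_deps = list(get_deps("recipes/mypkg", build=False))
print(build_deps, run_deps)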
Example #34
def get_dag(recipes):
    """
    `recipes` is an iterable of recipe paths.

    Returns the DAG of recipe paths and a dictionary that maps package names to
    recipe paths. These recipe path values are lists and contain paths to all
    defined versions.
    """
    recipes = list(recipes)
    metadata = [MetaData(recipe) for recipe in recipes]

    # meta.yaml's package:name mapped to the recipe path
    name2recipe = defaultdict(list)
    for meta, recipe in zip(metadata, recipes):
        name2recipe[meta.get_value("package/name")].append(recipe)

    def get_inner_deps(dependencies):
        for dep in dependencies:
            name = dep.split()[0]
            if name in name2recipe:
                yield name

    dag = nx.DiGraph()
    dag.add_nodes_from(meta.get_value("package/name") for meta in metadata)
    for meta in metadata:
        name = meta.get_value("package/name")
        dag.add_edges_from((dep, name) for dep in set(
            get_inner_deps(chain(get_deps(meta), get_deps(meta,
                                                          build=False)))))

    #nx.relabel_nodes(dag, name2recipe, copy=False)
    return dag, name2recipe
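# A hedged sketch of consuming the DAG: topological order guarantees every
# dependency is built before its dependents. Recipe paths are hypothetical.
import networkx as nx

dag, name2recipe = get_dag(["recipes/a", "recipes/b"])
for name in nx.topological_sort(dag):
    for recipe in name2recipe[name]:
        print("would build", recipe)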
Example #35
def package_built_name(package, root_dir):
    package_dir = os.path.join(root_dir, package)
    if hasattr(conda_build, 'api'):
        return conda_build.api.get_output_file_path(package_dir)
    else:
        meta = MetaData(package_dir)
        return bldpkg_path(meta)
Example #36
def test_native_compiler_metadata_win(testing_config, py_ver, mocker):
    variant = {'python': py_ver[0]}
    testing_config._platform = 'win'
    metadata = MetaData(os.path.join(metadata_dir, '_compiler_jinja2'),
                        config=testing_config,
                        variant=variant)
    assert py_ver[1] in metadata.meta['requirements']['build']
Example #37
    def test_py_xx_version(self):
        recipe = """
            package:
                name: recipe_which_depends_on_py_version
                version: 2
            requirements:
                build:
                 - python >=2.7
                 - numpy
                run:
                 - python x.x
                 - numpy x.x
            """
        with open(os.path.join(self.recipe_dir, 'meta.yaml'), 'w') as fh:
            fh.write(recipe)
        conda_build.config.config.CONDA_PY = 35
        conda_build.config.config.CONDA_NPY = 17

        meta = MetaData(self.recipe_dir)

        self.index.add_pkg('python', '2.7.2')
        self.index.add_pkg('python', '2.6.2')
        self.index.add_pkg('python', '3.5.0')
        self.index.add_pkg('numpy', '1.8.0', depends=['python'])
        r = BakedDistribution.compute_matrix(meta, self.index)
        self.assertEqual(len(r), 2)
        self.assertEqual(r[0].build_id(), 'np18py27_0')
        self.assertEqual(r[1].build_id(), 'np18py35_0')
Example #38
def meta_of_feedstock(forge_dir):
    recipe_dir = 'recipe'
    meta_dir = os.path.join(forge_dir, recipe_dir)
    if not os.path.exists(meta_dir):
        raise IOError("The given directory isn't a feedstock.")
    meta = MetaData(meta_dir)
    return meta
Example #39
def build_recipe(recipe):
    def build(py=None):
        try:
            out = None if args.verbose else sp.PIPE
            py = ["--python", py] if py is not None else []
            sp.run(["conda", "build", "--no-anaconda-upload"] + py +
                   ["--skip-existing", "--quiet", recipe],
                   stderr=out,
                   stdout=out,
                   check=True,
                   universal_newlines=True,
                   env=os.environ)
            return True
        except sp.CalledProcessError as e:
            if e.stdout is not None:
                print(e.stdout)
                print(e.stderr)
            return False

    conda_index()
    if "python" not in get_deps(MetaData(recipe), build=False):
        success = build()
    else:
        # use list to enforce all builds
        success = all(list(map(build, PYTHON_VERSIONS)))

    if not success:
        # fail if all builds result in an error
        assert False, "At least one build of recipe {} failed.".format(recipe)
Example #40
def render_recipe(recipe_path, config, no_download_source=False):
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    if config.set_build_id:
        # updates a unique build id if not already computed
        config.compute_build_id(os.path.basename(recipe_dir))
    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    m, need_download, need_reparse_in_env = parse_or_try_download(m,
                                                no_download_source=no_download_source,
                                                config=config)
    if need_download and no_download_source:
        raise ValueError("no_download_source specified, but can't fully render recipe without"
                         " downloading source.  Please fix the recipe, or don't use "
                         "no_download_source.")
    config.noarch = bool(m.get_value('build/noarch'))

    if need_cleanup:
        rm_rf(recipe_dir)

    return m, need_download, need_reparse_in_env
Example #41
 def __call__(self, args):
     if not os.path.isdir(args.recipe_directory):
         raise IOError("The recipe directory should be the directory of the conda-recipe. Got {}".format(args.recipe_directory))
     meta = MetaData(args.recipe_directory)
     feedstock_directory = args.feedstock_directory.format(package=argparse.Namespace(name=meta.name()))
     generate_feedstock_content(feedstock_directory, args.recipe_directory)
     if not args.no_git_repo:
         create_git_repo(feedstock_directory, meta)
Example #42
def testing_metadata(request, testing_config):
    d = defaultdict(dict)
    d['package']['name'] = request.function.__name__
    d['package']['version'] = '1.0'
    d['build']['number'] = '1'
    d['build']['entry_points'] = []
    d['requirements']['build'] = ['python']
    d['requirements']['run'] = ['python']
    d['test']['commands'] = ['echo "A-OK"', 'exit 0']
    d['about']['home'] = "sweet home"
    d['about']['license'] = "contract in blood"
    d['about']['summary'] = "a test package"
    testing_config.variant = get_default_variants()[0]
    return MetaData.fromdict(d, config=testing_config)
Example #43
def main():
    token = os.environ.get('BINSTAR_TOKEN')

    description = ('Upload or check consistency of a built version of a '
                   'conda recipe with binstar. Note: The existence of the '
                   'BINSTAR_TOKEN environment variable determines '
                   'whether the upload should actually take place.')
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('recipe_dir', help='the conda recipe directory')
    parser.add_argument('owner', help='the binstar owner/user')
    parser.add_argument('--channel', help='the binstar channel', default='main')
    args = parser.parse_args()
    recipe_dir, owner, channel = args.recipe_dir, args.owner, args.channel

    cli = get_binstar(argparse.Namespace(token=token, site=None))
    meta = MetaData(recipe_dir)
    if meta.skip():
        print("No upload to take place - this configuration was skipped in build/skip.")
        return
    exists = built_distribution_already_exists(cli, meta, owner)
    if token:
        on_channel = distribution_exists_on_channel(cli, meta, owner, channel)
        if not exists:
            upload(cli, meta, owner, channel)
            print('Uploaded {}'.format(bldpkg_path(meta)))
        elif not on_channel:
            print('Adding distribution {} to {}\'s {} channel'
                  ''.format(bldpkg_path(meta), owner, channel))
            add_distribution_to_channel(cli, meta, owner, channel)
        else:
            print('Distribution {} already \nexists on {}\'s {} channel.'
                  ''.format(bldpkg_path(meta), owner, channel))
    else:
        print("No BINSTAR_TOKEN present, so no upload is taking place. "
              "The distribution just built {} already available on {}'s "
              "{} channel.".format('is' if exists else 'is not',
                                   owner, channel))
Example #44
def main():
    recipe_dir = os.environ["RECIPE_DIR"]
    conda_platform = 'win-32' if os.environ["ARCH"] == '32' else 'win-64'
    prefix = os.environ['PREFIX']

    metadata = MetaData(recipe_dir)
    msys2_tar_xz_url = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['url']
    msys2_md5 = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['md5']
    mv_srcs_list = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['mv-srcs']
    mv_dsts_list = metadata.get_section(
        'extra')['msys2-binaries'][conda_platform]['mv-dsts']
    msys2_tar_xz = get_tar_xz(msys2_tar_xz_url, msys2_md5)
    tar = tarfile.open(msys2_tar_xz, 'r|xz')
    tar.extractall(path=prefix)

    try:
        patches = metadata.get_section(
            'extra')['msys2-binaries'][conda_platform]['patches']
    except KeyError:
        patches = []
    if len(patches):
        for patchname in patches:
            patchset = patch.fromfile(join(getenv('RECIPE_DIR'), patchname))
            patchset.apply(1, root=prefix)

    # shutil is a bit funny (like mv) with regards to how it treats
    # the destination depending on whether it is an existing directory or not
    # (i.e. moving into that versus moving as that).
    # Therefore, the rules employed are:
    # 1. If mv_dst ends with a '/' it is a directory that you want mv_src
    #    moved into.
    # 2. If mv_src has a wildcard, mv_dst is a directory that you want mv_src
    #    moved into.
    # In these cases we makedirs(mv_dst) and then call move(mv_src, mv_dst)
    # .. otherwise we makedirs(dirname(mv_dst)) and call move(mv_src, mv_dst)
    # .. however, if no mv_srcs exist we don't makedirs at all.
    for mv_src, mv_dst in zip(mv_srcs_list, mv_dsts_list):
        mv_dst_definitely_dir = False
        mv_srcs = glob(join(prefix, normpath(mv_src)))
        if '*' in mv_src or mv_dst.endswith('/') or len(mv_srcs) > 1:
            mv_dst_definitely_dir = True
        if len(mv_srcs):
            mv_dst = join(prefix, normpath(mv_dst))
            mv_dst_mkdir = mv_dst
            if not mv_dst_definitely_dir:
                mv_dst_mkdir = dirname(mv_dst_mkdir)
            try:
                makedirs(mv_dst_mkdir)
            except OSError:
                pass
            for mv_src in mv_srcs:
                move(mv_src, mv_dst)
    tar.close()
Example #45
def execute(args, parser):
    d = defaultdict(dict)
    d['package']['name'] = args.name
    d['package']['version'] = args.version
    d['build']['number'] = args.build_number
    d['build']['entry_points'] = args.entry_points
    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = args.build_string
    d['requirements']['run'] = args.dependencies
    d['about']['home'] = args.home
    d['about']['license'] = args.license
    d['about']['summary'] = args.summary
    m = MetaData.fromdict(d)

    build(m)
    handle_binstar_upload(bldpkg_path(m), args)
Example #46
def test_metadata(request, test_config):
    d = defaultdict(dict)
    d['package']['name'] = request.function.__name__
    d['package']['version'] = '1.0'
    d['build']['number'] = '1'
    d['build']['entry_points'] = []
    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = None
    d['requirements']['build'] = ['python']
    d['requirements']['run'] = ['python']
    d['test']['commands'] = ['echo "A-OK"', 'exit 0']
    d['about']['home'] = "sweet home"
    d['about']['license'] = "contract in blood"
    d['about']['summary'] = "a test package"

    return MetaData.fromdict(d, config=test_config)
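# Hedged sketch of a test consuming the fixture above; the assertion is
# illustrative.
def test_has_python_run_requirement(test_metadata):
    assert "python" in test_metadata.get_value("requirements/run")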
Example #47
def create_metapackage(name, version, entry_points=(), build_string=None, build_number=0,
                       dependencies=(), home=None, license_name=None, summary=None, config=None):
    # local import to avoid circular import; we provide create_metapackage in api
    from conda_build.api import build

    if not config:
        config = Config()

    d = defaultdict(dict)
    d['package']['name'] = name
    d['package']['version'] = version
    d['build']['number'] = build_number
    d['build']['entry_points'] = entry_points
    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = build_string
    d['requirements']['run'] = dependencies
    d['about']['home'] = home
    d['about']['license'] = license_name
    d['about']['summary'] = summary
    d = dict(d)
    m = MetaData.fromdict(d, config=config)
    config.compute_build_id(m.name())

    return build(m, config=config, need_source_download=False)
Example #48
import os
import sys

# Assumed imports for this snippet: ``cc`` below is the conda config module,
# which exposed the target ``subdir`` in older conda releases.
import conda.config as cc
from conda_build.metadata import MetaData


res = list()
for meta_path in open(sys.argv[1]):
    input_dir = os.path.join('./bioconda-recipes', os.path.dirname(meta_path))
    if os.path.exists(input_dir):

        for arch in ['osx-64', 'linux-64']:
            package = dict()
            package['arch'] = arch
            # set the architecture before parsing the metadata
            cc.subdir = arch

            recipe_meta = MetaData(input_dir)
            package['name'] = recipe_meta.get_value('package/name')
            package['version'] = recipe_meta.get_value('package/version')
            url = recipe_meta.get_value('source/url')
            if url:
                package['sha256'] = recipe_meta.get_value('source/sha256')
                package['md5'] = recipe_meta.get_value('source/md5')
            else:
                # git_url and hopefully git_rev
                git_url = recipe_meta.get_value('source/git_url')
                git_rev = recipe_meta.get_value('source/git_rev')
                if git_url.endswith('.git'):
                    git_url = git_url[:-len('.git')]
                url = '%s/%s.tar.gz' % (git_url, git_rev)
                if not git_rev:
                    sys.exit('git revision is missing for: %s' % input_dir)
            package['url'] = url
            res.append(package)
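# A hedged continuation: emit the collected package records as JSON so a
# downstream tool can consume them.
import json
print(json.dumps(res, indent=2))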
Example #49
 def __call__(self, args):
     meta = MetaData(args.recipe_directory)
     feedstock_directory = args.feedstock_directory.format(package=argparse.Namespace(name=meta.name()))
     generate_feedstock_content(feedstock_directory, args.recipe_directory)
     if not args.no_git_repo:
         create_git_repo(feedstock_directory, meta)
Example #50
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    
                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)

                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs if pkg.split(' ')[0] not
                            in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)

                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
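Example #50 drives a simple work queue: when a build fails with a missing-dependency error, it pushes the failing recipe back onto the deque, pushes the dependency's recipe directory in front of it, and retries; to_build_recursive guards against looping forever on the same dependency. Stripped of the conda specifics, the control flow looks roughly like this (build_one and find_recipe_for are hypothetical stand-ins):

from collections import deque

def build_all(recipe_dirs, build_one, find_recipe_for):
    recipes = deque(recipe_dirs)
    attempted = []
    while recipes:
        recipe = recipes.popleft()
        try:
            build_one(recipe)
        except RuntimeError as err:
            dep = str(err)  # hypothetical: the error names the missing package
            dep_recipe = find_recipe_for(dep)
            if dep_recipe is None or dep in attempted:
                raise  # no local recipe, or already tried once: give up
            attempted.append(dep)
            recipes.appendleft(recipe)      # retry the original afterwards...
            recipes.appendleft(dep_recipe)  # ...but build the dependency first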
Example #51
    except binstar_client.errors.NotFound:
        dist_info = {}

    return bool(dist_info)

def upload(cli, meta, owner):
    try:
        with open('token', 'w') as fh:
            fh.write(cli.token)

        subprocess.check_call(['anaconda', '--quiet', '-t', 'token',
                               'upload', 'artifacts/{}.tar.bz2'.format(meta.dist()),
                               '--user={}'.format(owner)], env=os.environ)
    finally:
        os.remove('token')

if __name__ == '__main__':
    token = os.environ.get('TOKEN')
    owner = 'bioasp'

    cli = get_binstar(argparse.Namespace(token=token, site=None))

    meta = MetaData('recipe')
    exists = artifact_already_exists(cli, meta, owner)

    if not exists:
        upload(cli, meta, owner)
        print('Uploaded {}'.format(meta.dist()))
    else:
        print('Distribution {} already exists'.format(meta.dist()))
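Example #51 writes the token to a file named 'token' so it can be handed to the anaconda client via -t (which accepts a path to a token file), and deletes it in the finally block even when the upload fails. Where a fixed filename in the working directory is undesirable, a temporary file follows the same pattern; a sketch under that assumption:

import os
import subprocess
import tempfile

def upload_with_tempfile(cli, meta, owner):
    # hypothetical variant of upload() above, avoiding the fixed 'token' name
    fd, token_path = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'w') as fh:
            fh.write(cli.token)
        subprocess.check_call(['anaconda', '--quiet', '-t', token_path,
                               'upload', 'artifacts/{}.tar.bz2'.format(meta.dist()),
                               '--user={}'.format(owner)], env=os.environ)
    finally:
        os.remove(token_path)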
Example #52
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert 'markupsafe' not in sys.modules
        delete_trash(None)

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if len(str(version)) != 2 and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
            channel_urls=channel_urls,
            override_channels=args.override_channels)

    already_built = []
    to_build = args.recipe[:]
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                    channel_urls=channel_urls, override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels, include_recipe=args.include_recipe)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build.append(dep_pkg)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
Example #53
    def run(self):
        # Make sure the metadata has the conda attributes, even if the
        # distclass isn't CondaDistribution. We primarily do this to simplify
        # the code below.

        metadata = self.distribution.metadata

        for attr in CondaDistribution.conda_attrs:
            if not hasattr(metadata, attr):
                setattr(metadata, attr,
                    CondaDistribution.conda_attrs[attr])

        # The command line takes precedence
        if self.buildnum is not None:
            metadata.conda_buildnum = self.buildnum

        with Locked(config.croot):
            d = defaultdict(dict)
            # PyPI allows uppercase letters but conda does not, so we fix the
            # name here.
            d['package']['name'] = metadata.name.lower()
            d['package']['version'] = metadata.version
            d['build']['number'] = metadata.conda_buildnum

            # MetaData does the auto stuff if the build string is None
            d['build']['string'] = metadata.conda_buildstr

            d['build']['binary_relocation'] = metadata.conda_binary_relocation
            d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
            d['build']['features'] = metadata.conda_features
            d['build']['track_features'] = metadata.conda_track_features

            # XXX: I'm not really sure if it is correct to combine requires
            # and install_requires
            d['requirements']['run'] = d['requirements']['build'] = \
                [spec_from_line(i) for i in
                    (metadata.requires or []) +
                    (getattr(self.distribution, 'install_requires', []) or
                        [])] + ['python']
            if hasattr(self.distribution, 'tests_require'):
                # A lot of packages use extras_require['test'], but
                # tests_require is the one that is officially supported by
                # setuptools.
                d['test']['requires'] = [spec_from_line(i) for i in
                    self.distribution.tests_require or []]

            d['about']['home'] = metadata.url
            # Don't worry about classifiers. This isn't skeleton pypi. We
            # don't need to make this work with random stuff in the wild. If
            # someone writes their setup.py wrong and this doesn't work, it's
            # their fault.
            d['about']['license'] = metadata.license
            d['about']['summary'] = metadata.description

            # This is similar logic from conda skeleton pypi
            entry_points = getattr(self.distribution, 'entry_points', [])
            if entry_points:
                if isinstance(entry_points, string_types):
                    # makes sure it is left-shifted
                    newstr = "\n".join(x.strip() for x in
                        entry_points.splitlines())
                    c = configparser.ConfigParser()
                    entry_points = {}
                    try:
                        c.readfp(StringIO(newstr))
                    except Exception as err:
                        # This seems to be the best error here
                        raise DistutilsGetoptError("ERROR: entry-points not understood: " +
                                                   str(err) + "\nThe string was:\n" + newstr)
                    else:
                        # 'config' in this scope is conda_build's config module;
                        # the ConfigParser instance is 'c'.
                        for section in c.sections():
                            if section in ['console_scripts', 'gui_scripts']:
                                value = ['%s=%s' % (option, c.get(section, option))
                                         for option in c.options(section)]
                                entry_points[section] = value
                            else:
                                # Make sure setuptools is added as a dependency below
                                entry_points[section] = None

                if not isinstance(entry_points, dict):
                    raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" +
                                               entry_points)
                else:
                    rs = entry_points.get('scripts', [])
                    cs = entry_points.get('console_scripts', [])
                    gs = entry_points.get('gui_scripts', [])
                    # We have *other* kinds of entry-points so we need
                    # setuptools at run-time
                    if not rs and not cs and not gs and len(entry_points) > 1:
                        d['requirements']['run'].append('setuptools')
                        d['requirements']['build'].append('setuptools')
                    entry_list = rs + cs + gs
                    if gs and conda.config.platform == 'osx':
                        d['build']['osx_is_app'] = True
                    if len(cs + gs) != 0:
                        d['build']['entry_points'] = entry_list
                        if metadata.conda_command_tests is True:
                            d['test']['commands'] = list(map(unicode,
                                                             pypi.make_entry_tests(entry_list)))

            if 'setuptools' in d['requirements']['run']:
                d['build']['preserve_egg_dir'] = True

            if metadata.conda_import_tests:
                if metadata.conda_import_tests is True:
                    d['test']['imports'] = ((self.distribution.packages or []) +
                                            (self.distribution.py_modules or []))
                else:
                    d['test']['imports'] = metadata.conda_import_tests

            if (metadata.conda_command_tests and not
                    isinstance(metadata.conda_command_tests,
                    bool)):
                d['test']['commands'] = list(map(unicode, metadata.conda_command_tests))

            d = dict(d)
            m = MetaData.fromdict(d)
            # Shouldn't fail, but do you really trust the code above?
            m.check_fields()
            build.build(m, post=False)
            # Do the install
            if not PY3:
                # Command is an old-style class in Python 2
                install.run(self)
            else:
                super().run()
            build.build(m, post=True)
            build.test(m)
            if self.binstar_upload:
                class args:
                    binstar_upload = self.binstar_upload
                handle_binstar_upload(render.bldpkg_path(m), args)
            else:
                no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
""" % render.bldpkg_path(m)
                print(no_upload_message)
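Example #53 assembles a plain nested dict and converts it to recipe metadata with MetaData.fromdict, the same entry point the test fixture in Example #55 below uses. A minimal, self-contained illustration of the shape it expects (all values are made up):

from conda_build.metadata import MetaData

d = {
    'package': {'name': 'mypkg', 'version': '1.0'},
    'build': {'number': 0},
    'requirements': {'build': ['python'], 'run': ['python']},
    'about': {'license': 'BSD', 'summary': 'demo package'},
}
m = MetaData.fromdict(d)
m.check_fields()  # the same sanity check Example #53 runs before building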
Example #54
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    if args.python:
        if args.python == ['all']:
            for py in [26, 27, 33, 34]:
                args.python = [str(py)]
                execute(args, parser)
            return
        if len(args.python) > 1:
            for py in args.python[:]:
                args.python = [py]
                execute(args, parser)
        else:
            config.CONDA_PY = int(args.python[0].replace('.', ''))
    if args.perl:
        config.CONDA_PERL = args.perl
    if args.numpy:
        if args.numpy == ['all']:
            for npy in [16, 17, 18]:
                args.numpy = [str(npy)]
                execute(args, parser)
            return
        if len(args.numpy) > 1:
            for npy in args.numpy[:]:
                args.numpy = [npy]
                execute(args, parser)
        else:
            config.CONDA_NPY = int(args.numpy[0].replace('.', ''))

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Example #55
    def setUp(self):
        self.meta = MetaData.fromdict({'package': {'name': 'test_pkg'},
                                       'requirements': {'build': []}})
Example #56
def get_noarch_python_meta(meta):
    d = meta.meta
    d['build']['noarch'] = "python"
    return MetaData.fromdict(d, config=meta.config)
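Example #56 rewrites the in-memory metadata rather than the meta.yaml on disk. A caller might use it along these lines (the recipe path is hypothetical, and the recipe is assumed to define a build section):

from conda_build.metadata import MetaData

meta = MetaData('path/to/recipe')           # hypothetical recipe directory
noarch_meta = get_noarch_python_meta(meta)  # from Example #56
assert noarch_meta.get_value('build/noarch') == 'python'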
Example #57
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import croot
    from conda_build.metadata import MetaData

    check_external()

    with Locked(croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding())
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            m = MetaData(recipe_dir)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                try:
                    build.build(m)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found matching:'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1].replace(' ', '-')
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
Example #58
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert "markupsafe" not in sys.modules
        delete_trash(None)

    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY", "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ("python", "numpy"):
                version = int(version.replace(".", ""))
            setattr(config, conda_version[lang], version)
        if len(str(version)) not in (2, 3) and lang in ["python", "numpy"]:
            if all_versions[lang]:
                raise RuntimeError(
                    "%s must be major.minor, like %s, not %s"
                    % (conda_version[lang], all_versions[lang][-1] / 10, version)
                )
            else:
                raise RuntimeError("%s must be major.minor, not %s" % (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(
            clear_cache=True, channel_urls=channel_urls, override_channels=args.override_channels
        )

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(
                    m, verbose=not args.quiet, channel_urls=channel_urls, override_channels=args.override_channels
                )
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    if m.skip():
                        print("Skipped: The %s recipe defines build/skip for this " "configuration." % m.dist())
                        continue
                    build.build(
                        m,
                        verbose=not args.quiet,
                        post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                        include_recipe=args.include_recipe,
                    )
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith("No packages found") or error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(
                                    (
                                        "Missing dependency {0}, but found"
                                        + " recipe directory, so building "
                                        + "{0} first"
                                    ).format(dep_pkg)
                                )
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if line.strip().startswith("- ")]
                        pkgs = [line.lstrip("- ") for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ["python", "r"]
                        pkgs = [pkg for pkg in pkgs if pkg.split(" ")[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if " " in pkg:
                                pkg = pkg.split(" ")[0]
                            recipe_glob = glob(pkg + "-[v0-9][0-9.]*")
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(
                                        (
                                            "Missing dependency {0}, but found"
                                            + " recipe directory, so building "
                                            + "{0} first"
                                        ).format(pkg)
                                    )
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(
                        m, verbose=not args.quiet, channel_urls=channel_urls, override_channels=args.override_channels
                    )
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
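All of the execute variants encode --python/--numpy versions by dropping the dot and casting to int, so CONDA_PY ends up as a two- or three-digit integer; the len(str(version)) check is what rejects anything that is not major.minor. The encoding in isolation:

def encode_lang_version(version):
    # '2.7' -> 27, '3.5' -> 35, '3.10' -> 310, mirroring the execute() variants
    encoded = int(version.replace('.', ''))
    if len(str(encoded)) not in (2, 3):
        raise RuntimeError('version must be major.minor, not %s' % version)
    return encoded

print(encode_lang_version('3.5'))  # 35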
Example #59
def build_wheel(recipe, versions_combis=None,
                conda_channel_urls=(),
                conda_override_channels=(),
                upload=(),
                wheel_dir="./build"):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    from conda_build.config import config
    from conda_build.metadata import MetaData

    import conda_build_wheel.build_wheel as build

    # Avoid a shared mutable default argument; the dict is mutated below.
    if versions_combis is None:
        versions_combis = {"python": None, "numpy": None}

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
    }
    for lang in ['python', 'numpy']:
        versions = versions_combis[lang]
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                # Continuing with versions == ['all'] would crash at the
                # int() conversion below, so fail with a clear message.
                raise ValueError("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                versions_combis[lang] = [str(ver)]
                build_wheel(recipe, versions_combis, conda_channel_urls=conda_channel_urls,
                            conda_override_channels=conda_override_channels,
                            upload=upload, wheel_dir=wheel_dir)
                # This is necessary to make all combinations build.
                versions_combis[lang] = versions
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if len(str(version)) not in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                                   (conda_version[lang],
                                    all_versions[lang][-1] / 10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                                   (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    with Locked(config.croot):
        # Don't use byte literals for paths in Python 2
        if not PY3:
            recipe = recipe.decode(getpreferredencoding() or 'utf-8')
        if isfile(recipe):
            if recipe.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(recipe, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % recipe)
                return
        else:
            recipe_dir = abspath(recipe)
            need_cleanup = False

        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        try:
            m = MetaData(recipe_dir)
            if m.get_value('build/noarch_python'):
                config.noarch = True
        except exceptions.YamlParsingError as e:
            sys.stderr.write(e.error_msg())
            sys.exit(1)
        m.check_fields()

        if m.skip():
            print(
                "Skipped: The %s recipe defines build/skip for this "
                "configuration." % m.dist())
            return
        build.build(m, channel_urls=conda_channel_urls,
                    override_channels=conda_override_channels, wheel_dir=wheel_dir)

        if need_cleanup:
            shutil.rmtree(recipe_dir)
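A possible invocation of Example #59, assuming conda_build_wheel is installed and 'my-recipe' is a directory containing a meta.yaml (both hypothetical):

build_wheel('my-recipe',
            versions_combis={'python': ['2.7', '3.5'], 'numpy': None},
            conda_channel_urls=('defaults',),
            wheel_dir='./build')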