Example #1
def test_skip_existing_url(testing_metadata, testing_workdir, capfd):
    # make sure that it is built
    outputs = api.build(testing_metadata)

    # Copy our package into some new folder
    output_dir = os.path.join(testing_workdir, 'someoutput')
    platform = os.path.join(output_dir, testing_metadata.config.host_subdir)
    os.makedirs(platform)
    copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0])))

    # create the index so conda can find the file
    api.update_index(platform, config=testing_metadata.config)

    # HACK: manually create noarch location there, so that conda 4.3.2+ considers it a valid channel
    noarch = os.path.join(output_dir, 'noarch')
    os.makedirs(noarch)
    api.update_index(noarch, config=testing_metadata.config)

    testing_metadata.config.skip_existing = True
    testing_metadata.config.channel_urls = [url_path(output_dir)]

    api.build(testing_metadata)

    output, error = capfd.readouterr()
    assert "are already built" in output
    assert url_path(testing_metadata.config.croot) in output
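
All of these tests lean on url_path to turn a local directory into a channel URL. A minimal, hypothetical sketch of what that helper is expected to produce (the real implementation lives in conda_build.conda_interface and handles platform quirks this sketch ignores):

import os

def url_path_sketch(path):
    # Convert a local directory into a file:// URL that conda can treat
    # as a channel.  A sketch only: the real url_path also handles
    # Windows drive letters and percent-encoding.
    path = os.path.abspath(path).replace(os.sep, '/')
    if not path.startswith('/'):
        path = '/' + path  # e.g. on Windows: C:/... -> /C:/...
    return 'file://' + path

print(url_path_sketch('someoutput'))  # e.g. file:///home/user/someoutput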
Example #3
def execute(args):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)
    variants = get_package_variants(args.recipe, config)
    set_language_env_vars(variants)

    channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels

    metadata_tuples = api.render(args.recipe, config=config,
                                 no_download_source=args.no_source)

    if args.output:
        with LoggingContext(logging.CRITICAL + 1):
            config.verbose = False
            config.debug = False
            paths = api.get_output_file_paths(metadata_tuples, config=config)
            print('\n'.join(sorted(paths)))
    else:
        logging.basicConfig(level=logging.INFO)
        for (m, _, _) in metadata_tuples:
            print(api.output_yaml(m, args.file))
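
The channel-handling loop in execute() recurs almost verbatim across the examples below. A standalone sketch of the same idea, using only the standard library (the file:// prefix stands in for url_path):

import os

def normalize_channel_urls(channel_urls):
    """Sketch of the pattern above: local directory paths become file://
    URLs; anything else (http(s) URLs, channel names) passes through.
    Not conda-build's implementation."""
    normalized = []
    for url in channel_urls:
        if os.path.isdir(url):
            # resolve relative paths against the current working directory,
            # as the examples do with os.getcwd()
            url = 'file://' + os.path.abspath(url).replace(os.sep, '/')
        normalized.append(url)
    return normalized

# e.g. normalize_channel_urls(['./my-channel', 'conda-forge'])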
Example #4
def execute(args):
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    channel_urls = args.__dict__.get('channel') or args.__dict__.get(
        'channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(
                    os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    action = None
    if args.output:
        action = output_action
        config.verbose = False
        config.quiet = True
        config.debug = False
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        outputs = [action(recipe, config) for recipe in args.recipe]
    else:
        outputs = api.build(args.recipe,
                            post=args.post,
                            build_only=args.build_only,
                            notest=args.notest,
                            already_built=None,
                            config=config,
                            noverify=args.no_verify)

    if not args.output and len(utils.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
    return outputs
Example #5
def execute(args):
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    channel_urls = args.channel or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            action(recipe, config)
        outputs = []

    else:
        outputs = api.build(args.recipe, post=args.post, build_only=args.build_only,
                            notest=args.notest, already_built=None, config=config,
                            noverify=args.no_verify)

    if not args.output and len(utils.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
    return outputs
Example #6
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    import os
    from conda_build.conda_interface import url_path
    from conda_build.build import test
    from conda_build.render import render_recipe
    from conda_build.utils import get_recipe_abspath, rm_rf
    from conda_build import source

    config = get_or_merge_config(config, **kwargs)

    # we want to know if we're dealing with package input.  If so, we can move the input on success.
    is_package = False

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    else:
        recipe_dir, need_cleanup = get_recipe_abspath(recipedir_or_package_or_metadata)
        config.need_cleanup = need_cleanup

        # This will create a new local build folder if and only if config doesn't already have one.
        #   What this means is that if we're running a test immediately after build, we use the one
        #   that the build already provided
        metadata, _, _ = render_recipe(recipe_dir, config=config)
        recipe_config = config
        # this recipe came from an extracted tarball.
        if need_cleanup:
            # ensure that the local location of the package is indexed, so that conda can find the
            #    local package
            local_location = os.path.dirname(recipedir_or_package_or_metadata)
            # strip off extra subdir folders
            for platform in ('win', 'linux', 'osx'):
                if os.path.basename(local_location).startswith(platform + "-"):
                    local_location = os.path.dirname(local_location)
            update_index(local_location, config=config)
            local_url = url_path(local_location)
            # channel_urls is an iterable, but it may be a tuple, which does not
            #    support insertion, so convert it to a list before inserting.
            recipe_config.channel_urls = list(recipe_config.channel_urls)
            recipe_config.channel_urls.insert(0, local_url)
            is_package = True
            if metadata.meta.get('test') and metadata.meta['test'].get('source_files'):
                source.provide(metadata.path, metadata.get_section('source'), config=config)
            rm_rf(recipe_dir)

    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        #   What this means is that if we're running a test immediately after build, we use the one
        #   that the build already provided

        recipe_config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)

        if (test_result and is_package and hasattr(recipe_config, 'output_folder') and
                recipe_config.output_folder):
            os.rename(recipedir_or_package_or_metadata,
                      os.path.join(recipe_config.output_folder,
                                   os.path.basename(recipedir_or_package_or_metadata)))
    return test_result
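
The need_cleanup branch above has a strict ordering: index the directory first (update_index), then prepend it as a channel so the package under test resolves from its own location. A self-contained sketch of the prepend step, with hypothetical placeholder values:

def prepend_local_channel(channel_urls, local_location):
    """Sketch of the ordering used above; the caller must have indexed
    local_location before asking conda to solve."""
    local_url = 'file://' + local_location   # stand-in for url_path()
    channels = list(channel_urls)            # channel_urls may be a tuple
    if local_url not in channels:
        channels.insert(0, local_url)        # local channel takes priority
    return channels

print(prepend_local_channel(('defaults',), '/tmp/extracted-pkgs'))
# ['file:///tmp/extracted-pkgs', 'defaults']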
Example #7
def test_skip_existing_url(testing_workdir, test_config, capfd):
    # make sure that it is built
    api.build(empty_sections, config=test_config)
    output_file = os.path.join(test_config.croot, test_config.subdir, "empty_sections-0.0-0.tar.bz2")

    platform = os.path.join(testing_workdir, test_config.subdir)
    copy_into(output_file, os.path.join(platform, os.path.basename(output_file)))

    # create the index so conda can find the file
    api.update_index(platform, config=test_config)

    api.build(os.path.join(metadata_dir, "empty_sections"), skip_existing=True,
              config=test_config, channel_urls=[url_path(testing_workdir)])

    output, error = capfd.readouterr()
    assert "is already built" in output
    assert url_path(test_config.croot) in output
Example #8
def test_skip_existing_url(test_metadata, testing_workdir, capfd):
    # make sure that it is built
    output_file = api.get_output_file_path(test_metadata)
    api.build(test_metadata)

    # Copy our package into some new folder
    platform = os.path.join(testing_workdir, test_metadata.config.subdir)
    copy_into(output_file, os.path.join(platform, os.path.basename(output_file)))

    # create the index so conda can find the file
    api.update_index(platform, config=test_metadata.config)

    test_metadata.config.skip_existing = True
    test_metadata.config.channel_urls = [url_path(testing_workdir)]
    api.build(test_metadata)

    output, error = capfd.readouterr()
    assert "is already built" in output
    assert url_path(test_metadata.config.croot) in output
Example #9
def execute(args, print_results=True):
    p, args = parse_args(args)

    config = get_or_merge_config(None, **args.__dict__)

    variants = get_package_variants(args.recipe,
                                    config,
                                    variants=args.variants)
    set_language_env_vars(variants)

    channel_urls = args.__dict__.get('channel') or args.__dict__.get(
        'channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(
                    os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels

    if args.output:
        config.verbose = False
        config.debug = False

    metadata_tuples = api.render(args.recipe,
                                 config=config,
                                 no_download_source=args.no_source,
                                 variants=args.variants)

    if print_results:
        if args.output:
            with LoggingContext(logging.CRITICAL + 1):
                paths = api.get_output_file_paths(metadata_tuples,
                                                  config=config)
                print('\n'.join(sorted(paths)))
        else:
            logging.basicConfig(level=logging.INFO)
            for (m, _, _) in metadata_tuples:
                print("--------------")
                print("Hash contents:")
                print("--------------")
                pprint(m.get_hash_contents())
                print("----------")
                print("meta.yaml:")
                print("----------")
                print(api.output_yaml(m, args.file, suppress_outputs=True))
    else:
        return metadata_tuples
Example #10
def test_skip_existing_url(test_metadata, testing_workdir, capfd):
    # make sure that it is built
    outputs = api.build(test_metadata)

    # Copy our package into some new folder
    output_dir = os.path.join(testing_workdir, 'someoutput')
    platform = os.path.join(output_dir, test_metadata.config.subdir)
    os.makedirs(platform)
    copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0])))

    # create the index so conda can find the file
    api.update_index(platform, config=test_metadata.config)

    test_metadata.config.skip_existing = True
    test_metadata.config.channel_urls = [url_path(output_dir)]
    api.build(test_metadata)

    output, error = capfd.readouterr()
    assert "is already built" in output
    assert url_path(test_metadata.config.croot) in output
Example #11
def test_skip_existing_url(testing_workdir, test_config, capfd):
    # make sure that it is built
    api.build(empty_sections, config=test_config)
    output_file = os.path.join(test_config.croot, test_config.subdir,
                               "empty_sections-0.0-0.tar.bz2")

    platform = os.path.join(testing_workdir, test_config.subdir)
    copy_into(output_file, os.path.join(platform,
                                        os.path.basename(output_file)))

    # create the index so conda can find the file
    api.update_index(platform, config=test_config)

    api.build(os.path.join(metadata_dir, "empty_sections"),
              skip_existing=True,
              config=test_config,
              channel_urls=[url_path(testing_workdir)])

    output, error = capfd.readouterr()
    assert "is already built" in output
    assert url_path(test_config.croot) in output
Example #12
def download_to_cache(cache_folder, recipe_path, source_dict):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % cache_folder)
    if not isdir(cache_folder):
        os.makedirs(cache_folder)

    fn = source_dict['fn'] if 'fn' in source_dict else basename(
        source_dict['url'])
    path = join(cache_folder, fn)
    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)
        if not isinstance(source_dict['url'], list):
            source_dict['url'] = [source_dict['url']]

        for url in source_dict['url']:
            if "://" not in url:
                if url.startswith('~'):
                    url = expanduser(url)
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(recipe_path, url))
                url = url_path(url)
            else:
                if url.startswith('file:///~'):
                    url = 'file:///' + expanduser(url[8:]).replace('\\', '/')
            try:
                print("Downloading %s" % url)
                with LoggingContext():
                    download(url, path)
            except CondaHTTPError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            else:
                print("Success")
                break
        else:  # no break
            raise RuntimeError("Could not download %s" % url)

    for tp in ('md5', 'sha1', 'sha256'):
        try:
            expected_hash = source_dict[tp]
            hashed = hashsum_file(path, tp)
            if expected_hash != hashed:
                raise RuntimeError("%s mismatch: '%s' != '%s'" %
                                   (tp.upper(), hashed, expected_hash))
        except KeyError:
            continue

    return path
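
The for ... else in download_to_cache is easy to misread: the else clause runs only when the loop completes without a break, i.e. when every mirror URL failed. A self-contained sketch of that retry pattern (fetch stands in for download and is assumed to raise on failure):

def fetch_first(urls, fetch):
    result = None
    for url in urls:
        try:
            result = fetch(url)    # stand-in for download(url, path)
        except RuntimeError as e:  # the example also catches CondaHTTPError
            print("Error: %s" % e)
        else:
            break                  # success: the for-else below is skipped
    else:  # no break: every URL failed
        raise RuntimeError("Could not download any of %r" % (urls,))
    return result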
Example #13
def download_to_cache(cache_folder, recipe_path, source_dict):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % cache_folder)
    if not isdir(cache_folder):
        os.makedirs(cache_folder)

    fn = source_dict['fn'] if 'fn' in source_dict else basename(source_dict['url'])
    path = join(cache_folder, fn)
    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)
        if not isinstance(source_dict['url'], list):
            source_dict['url'] = [source_dict['url']]

        for url in source_dict['url']:
            if "://" not in url:
                if url.startswith('~'):
                    url = expanduser(url)
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(recipe_path, url))
                url = url_path(url)
            else:
                if url.startswith('file:///~'):
                    url = 'file:///' + expanduser(url[8:]).replace('\\', '/')
            try:
                print("Downloading %s" % url)
                with LoggingContext():
                    download(url, path)
            except CondaHTTPError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            else:
                print("Success")
                break
        else:  # no break
            raise RuntimeError("Could not download %s" % fn)

    for tp in ('md5', 'sha1', 'sha256'):
        try:
            expected_hash = source_dict[tp]
            hashed = hashsum_file(path, tp)
            if expected_hash != hashed:
                raise RuntimeError("%s mismatch: '%s' != '%s'" %
                                   (tp.upper(), hashed, expected_hash))
        except KeyError:
            continue

    return path
Example #14
def make_conda_config(config, python, append_file, condarc_options):
    """Creates a conda configuration for a build merging various sources.

    This function will use the conda-build API to construct a configuration by
    merging different sources of information.

    Args:

      config: Path leading to the ``conda_build_config.yaml`` to use
      python: The version of python to use for the build as ``x.y`` (e.g.
        ``3.6``)
      append_file: Path leading to the ``recipe_append.yaml`` file to use
      condarc_options: A dictionary (typically read from a condarc YAML file)
        that contains build and channel options

    Returns: A ``Config`` object containing the merged configuration, as
    produced by conda-build API's ``get_or_merge_config()`` function.
    """

    with root_logger_protection():
        from conda_build.conda_interface import url_path

        retval = conda_build.api.get_or_merge_config(
            None,
            variant_config_files=config,
            python=python,
            append_sections_file=append_file,
            **condarc_options,
        )

    retval.channel_urls = []

    for url in condarc_options["channels"]:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(
                    os.path.abspath(os.path.join(os.getcwd(), url)))
            with root_logger_protection():
                url = url_path(url)
        retval.channel_urls.append(url)

    return retval
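
A hypothetical call to make_conda_config, assuming a condarc-style dictionary whose 'channels' key lists channel names and/or local paths (all file names here are placeholders):

condarc_options = {"channels": ["conda-forge", "./local-channel"]}

conda_config = make_conda_config(
    config="conda_build_config.yaml",   # placeholder variant config
    python="3.6",
    append_file="recipe_append.yaml",   # placeholder append file
    condarc_options=condarc_options,
)
# local directories in 'channels' come back as file:// URLs
print(conda_config.channel_urls)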
Example #15
def download_to_cache(metadata, config):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % config.src_cache)
    if not isdir(config.src_cache):
        os.makedirs(config.src_cache)
    meta = metadata.get_section('source')

    fn = meta['fn'] if 'fn' in meta else basename(meta['url'])
    path = join(config.src_cache, fn)
    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)
        if not isinstance(meta['url'], list):
            meta['url'] = [meta['url']]

        for url in meta['url']:
            if "://" not in url:
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(metadata.path, url))
                url = url_path(url)
            try:
                print("Downloading %s" % url)
                download(url, path)
            except CondaHTTPError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            else:
                print("Success")
                break
        else:  # no break
            raise RuntimeError("Could not download %s" % fn)

    for tp in 'md5', 'sha1', 'sha256':
        if meta.get(tp) and hashsum_file(path, tp) != meta[tp]:
            raise RuntimeError("%s mismatch: '%s' != '%s'" %
                               (tp.upper(), hashsum_file(path, tp), meta[tp]))

    return path
Example #16
def download_to_cache(metadata, config):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % config.src_cache)
    if not isdir(config.src_cache):
        os.makedirs(config.src_cache)
    meta = metadata.get_section('source')

    fn = meta['fn'] if 'fn' in meta else basename(meta['url'])
    path = join(config.src_cache, fn)
    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)
        if not isinstance(meta['url'], list):
            meta['url'] = [meta['url']]

        for url in meta['url']:
            if "://" not in url:
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(metadata.path, url))
                url = url_path(url)
            try:
                print("Downloading %s" % url)
                download(url, path)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            else:
                print("Success")
                break
        else:  # no break
            raise RuntimeError("Could not download %s" % fn)

    for tp in 'md5', 'sha1', 'sha256':
        if meta.get(tp) and hashsum_file(path, tp) != meta[tp]:
            raise RuntimeError("%s mismatch: '%s' != '%s'" %
                               (tp.upper(), hashsum_file(path, tp), meta[tp]))

    return path
Example #17
def execute(args):
    _parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    channel_urls = args.__dict__.get('channel') or args.__dict__.get(
        'channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(
                    os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    action = None
    outputs = None
    if args.output:
        action = output_action
        config.verbose = False
        config.quiet = True
        config.debug = False
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action == test_action:
        failed_recipes = []
        recipes = [
            item for sublist in [
                glob(os.path.abspath(recipe)) if '*' in recipe else [recipe]
                for recipe in args.recipe
            ] for item in sublist
        ]
        for recipe in recipes:
            try:
                action(recipe, config)
            except:
                if not args.keep_going:
                    raise
                else:
                    failed_recipes.append(recipe)
                    continue
        if failed_recipes:
            print("Failed recipes:")
            for recipe in failed_recipes:
                print("  - %s" % recipe)
            sys.exit(len(failed_recipes))
        else:
            print("All tests passed")
        outputs = []

    elif action:
        outputs = [action(recipe, config) for recipe in args.recipe]
    else:
        outputs = api.build(args.recipe,
                            post=args.post,
                            build_only=args.build_only,
                            notest=args.notest,
                            already_built=None,
                            config=config,
                            verify=args.verify,
                            variants=args.variants)

    if not args.output and len(utils.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
    return outputs
Example #18
def download_to_cache(cache_folder, recipe_path, source_dict):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % cache_folder)
    if not isdir(cache_folder):
        os.makedirs(cache_folder)

    source_urls = source_dict['url']
    if not isinstance(source_urls, list):
        source_urls = [source_urls]
    unhashed_fn = fn = source_dict['fn'] if 'fn' in source_dict else basename(source_urls[0])
    hash_added = False
    for hash_type in ('md5', 'sha1', 'sha256'):
        if hash_type in source_dict:
            fn = append_hash_to_fn(fn, source_dict[hash_type])
            hash_added = True
            break
    else:
        log = get_logger(__name__)
        log.warn("No hash (md5, sha1, sha256) provided for {}.  Source download forced.  "
                 "Add hash to recipe to use source cache.".format(unhashed_fn))
    path = join(cache_folder, fn)
    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)

        for url in source_urls:
            if "://" not in url:
                if url.startswith('~'):
                    url = expanduser(url)
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(recipe_path, url))
                url = url_path(url)
            else:
                if url.startswith('file:///~'):
                    url = 'file:///' + expanduser(url[8:]).replace('\\', '/')
            try:
                print("Downloading %s" % url)
                with LoggingContext():
                    download(url, path)
            except CondaHTTPError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
                rm_rf(path)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
                rm_rf(path)
            else:
                print("Success")
                break
        else:  # no break
            rm_rf(path)
            raise RuntimeError("Could not download %s" % url)

    hashed = None
    for tp in ('md5', 'sha1', 'sha256'):
        if tp in source_dict:
            expected_hash = source_dict[tp]
            hashed = hashsum_file(path, tp)
            if expected_hash != hashed:
                rm_rf(path)
                raise RuntimeError("%s mismatch: '%s' != '%s'" %
                                   (tp.upper(), hashed, expected_hash))
            break

    # this is really a fallback.  If people don't provide the hash, we still need to prevent
    #    collisions in our source cache, but the end user will get no benefit from the cache.
    if not hash_added:
        if not hashed:
            hashed = hashsum_file(path, 'sha256')
        dest_path = append_hash_to_fn(path, hashed)
        if not os.path.isfile(dest_path):
            shutil.move(path, dest_path)
        path = dest_path

    return path, unhashed_fn
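
append_hash_to_fn is what keeps differently-hashed sources from colliding in the shared cache. A hypothetical sketch of such a naming scheme (an assumption: conda-build's real helper may place the hash differently):

import os

def append_hash_to_fn_sketch(fn, hash_value):
    # pkg-1.0.tar.gz + 'abc123' -> pkg-1.0_abc123.tar.gz  (sketch only)
    if ".tar" in fn:
        stem, _, rest = fn.partition(".tar")  # keep .tar.gz/.tar.bz2 whole
        return "%s_%s.tar%s" % (stem, hash_value, rest)
    root, ext = os.path.splitext(fn)
    return "%s_%s%s" % (root, hash_value, ext)

print(append_hash_to_fn_sketch("pkg-1.0.tar.gz", "abc123"))
# pkg-1.0_abc123.tar.gz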
Example #19
def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False):
    ''' Download a source to the local cache. '''
    if verbose:
        log.info('Source cache directory is: %s' % cache_folder)
    if not isdir(cache_folder) and not os.path.islink(cache_folder):
        os.makedirs(cache_folder)

    source_urls = source_dict['url']
    if not isinstance(source_urls, list):
        source_urls = [source_urls]
    unhashed_fn = fn = source_dict['fn'] if 'fn' in source_dict else basename(
        source_urls[0])
    hash_added = False
    for hash_type in ('md5', 'sha1', 'sha256'):
        if hash_type in source_dict:
            if source_dict[hash_type] in (None, ""):
                raise ValueError('Empty {} hash provided for {}'.format(
                    hash_type, fn))
            fn = append_hash_to_fn(fn, source_dict[hash_type])
            hash_added = True
            break
    else:
        log.warn(
            "No hash (md5, sha1, sha256) provided for {}.  Source download forced.  "
            "Add hash to recipe to use source cache.".format(unhashed_fn))
    path = join(cache_folder, fn)
    if isfile(path):
        if verbose:
            log.info('Found source in cache: %s' % fn)
    else:
        if verbose:
            log.info('Downloading source to cache: %s' % fn)

        for url in source_urls:
            if "://" not in url:
                if url.startswith('~'):
                    url = expanduser(url)
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(recipe_path, url))
                url = url_path(url)
            else:
                if url.startswith('file:///~'):
                    url = 'file:///' + expanduser(url[8:]).replace('\\', '/')
            try:
                if verbose:
                    log.info("Downloading %s" % url)
                with LoggingContext():
                    download(url, path)
            except CondaHTTPError as e:
                log.warn("Error: %s" % str(e).strip())
                rm_rf(path)
            except RuntimeError as e:
                log.warn("Error: %s" % str(e).strip())
                rm_rf(path)
            else:
                if verbose:
                    log.info("Success")
                break
        else:  # no break
            rm_rf(path)
            raise RuntimeError("Could not download %s" % url)

    hashed = None
    for tp in ('md5', 'sha1', 'sha256'):
        if tp in source_dict:
            expected_hash = source_dict[tp]
            hashed = hashsum_file(path, tp)
            if expected_hash != hashed:
                rm_rf(path)
                raise RuntimeError("%s mismatch: '%s' != '%s'" %
                                   (tp.upper(), hashed, expected_hash))
            break

    # this is really a fallback.  If people don't provide the hash, we still need to prevent
    #    collisions in our source cache, but the end user will get no benefit from the cache.
    if not hash_added:
        if not hashed:
            hashed = hashsum_file(path, 'sha256')
        dest_path = append_hash_to_fn(path, hashed)
        if not os.path.isfile(dest_path):
            shutil.move(path, dest_path)
        path = dest_path

    return path, unhashed_fn
Example #20
def _construct_metadata_for_test_from_package(package, config):
    recipe_dir, need_cleanup = utils.get_recipe_abspath(package)
    config.need_cleanup = need_cleanup
    config.recipe_dir = recipe_dir
    hash_input = {}

    info_dir = os.path.normpath(os.path.join(recipe_dir, "info"))
    with open(os.path.join(info_dir, "index.json")) as f:
        package_data = json.load(f)

    if package_data["subdir"] != "noarch":
        config.host_subdir = package_data["subdir"]
    # We may be testing an (old) package built without filename hashing.
    hash_input = os.path.join(info_dir, "hash_input.json")
    if os.path.isfile(hash_input):
        with open(os.path.join(info_dir, "hash_input.json")) as f:
            hash_input = json.load(f)
    else:
        config.filename_hashing = False
        hash_input = {}
    # not actually used as a variant, since metadata will have been finalized.
    #    This is still necessary for computing the hash correctly though
    config.variant = hash_input
    log = utils.get_logger(__name__)

    # get absolute file location
    local_pkg_location = os.path.normpath(os.path.abspath(os.path.dirname(package)))

    # get last part of the path
    last_element = os.path.basename(local_pkg_location)
    is_channel = False
    for platform in ("win-", "linux-", "osx-", "noarch"):
        if last_element.startswith(platform):
            is_channel = True

    if not is_channel:
        log.warn(
            "Copying package to conda-build croot.  No packages otherwise alongside yours will"
            " be available unless you specify -c local.  To avoid this warning, your package "
            "must reside in a channel structure with platform-subfolders.  See more info on "
            "what a valid channel is at "
            "https://conda.io/docs/user-guide/tasks/create-custom-channels.html"
        )

        local_dir = os.path.join(config.croot, config.host_subdir)
        mkdir_p(local_dir)
        local_pkg_location = os.path.join(local_dir, os.path.basename(package))
        utils.copy_into(package, local_pkg_location)
        local_pkg_location = local_dir

    local_channel = os.path.dirname(local_pkg_location)

    # update indices in the channel
    update_index(local_channel, verbose=config.debug, threads=1)

    try:
        # raise IOError()
        # metadata = render_recipe(
        #     os.path.join(info_dir, "recipe"), config=config, reset_build_id=False
        # )[0][0]

        metadata = get_metadata(os.path.join(info_dir, "recipe", "recipe.yaml"), config)
        # with open(os.path.join(info_dir, "recipe", "recipe.yaml")) as fi:
        # metadata = yaml.load(fi)
    # no recipe in package.  Fudge metadata
    except SystemExit:
        # force the build string to line up - recomputing it would
        #    yield a different result
        metadata = MetaData.fromdict(
            {
                "package": {
                    "name": package_data["name"],
                    "version": package_data["version"],
                },
                "build": {
                    "number": int(package_data["build_number"]),
                    "string": package_data["build"],
                },
                "requirements": {"run": package_data["depends"]},
            },
            config=config,
        )
    # HACK: because the recipe is fully baked, detecting "used" variables no longer works.  The set
    #     of variables in the hash_input suffices, though.

    if metadata.noarch:
        metadata.config.variant["target_platform"] = "noarch"

    metadata.config.used_vars = list(hash_input.keys())
    urls = list(utils.ensure_list(metadata.config.channel_urls))
    local_path = url_path(local_channel)
    # replace local with the appropriate real channel.  Order is maintained.
    urls = [url if url != "local" else local_path for url in urls]
    if local_path not in urls:
        urls.insert(0, local_path)
    metadata.config.channel_urls = urls
    utils.rm_rf(metadata.config.test_dir)
    return metadata, hash_input
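
The channel bookkeeping at the end of _construct_metadata_for_test_from_package preserves any user-specified order while guaranteeing the local channel is searched. A condensed, runnable sketch with placeholder values:

urls = ["local", "https://conda.anaconda.org/conda-forge"]  # placeholder config
local_path = "file:///tmp/croot"   # placeholder for url_path(local_channel)

# replace the "local" alias with the concrete channel; order is maintained
urls = [url if url != "local" else local_path for url in urls]
if local_path not in urls:
    urls.insert(0, local_path)
print(urls)  # ['file:///tmp/croot', 'https://conda.anaconda.org/conda-forge']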
Example #21
def execute(args):
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    channel_urls = args.__dict__.get('channel') or args.__dict__.get('channels') or ()
    config.channel_urls = []

    for url in channel_urls:
        # allow people to specify relative or absolute paths to local channels
        #    These channels still must follow conda rules - they must have the
        #    appropriate platform-specific subdir (e.g. win-64)
        if os.path.isdir(url):
            if not os.path.isabs(url):
                url = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(), url)))
            url = url_path(url)
        config.channel_urls.append(url)

    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    action = None
    outputs = None
    if args.output:
        action = output_action
        config.verbose = False
        config.quiet = True
        config.debug = False
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action == test_action:
        failed_recipes = []
        recipes = [item for sublist in
                   [glob(os.path.abspath(recipe)) if '*' in recipe else [recipe]
                    for recipe in args.recipe]
                   for item in sublist]
        for recipe in recipes:
            try:
                action(recipe, config)
            except:
                if not args.keep_going:
                    raise
                else:
                    failed_recipes.append(recipe)
                    continue
        if failed_recipes:
            print("Failed recipes:")
            for recipe in failed_recipes:
                print("  - %s" % recipe)
            sys.exit(len(failed_recipes))
        else:
            print("All tests passed")
        outputs = []

    elif action:
        outputs = [action(recipe, config) for recipe in args.recipe]
    else:
        outputs = api.build(args.recipe, post=args.post, build_only=args.build_only,
                            notest=args.notest, already_built=None, config=config,
                            verify=args.verify, variants=args.variants)

    if not args.output and len(utils.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
    return outputs
Example #22
def download_to_cache(cache_folder, recipe_path, source_dict):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % cache_folder)
    if not isdir(cache_folder):
        os.makedirs(cache_folder)

    fn = source_dict['fn'] if 'fn' in source_dict else basename(
        source_dict['url'])
    hash_added = False
    for hash_type in ('md5', 'sha1', 'sha256'):
        if hash_type in source_dict:
            fn = append_hash_to_fn(fn, source_dict[hash_type])
            hash_added = True
            break
    else:
        log = get_logger(__name__)
        log.warn(
            "No hash (md5, sha1, sha256) provided.  Source download forced.  "
            "Add hash to recipe to use source cache.")
    path = join(cache_folder, fn)
    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)
        if not isinstance(source_dict['url'], list):
            source_dict['url'] = [source_dict['url']]

        for url in source_dict['url']:
            if "://" not in url:
                if url.startswith('~'):
                    url = expanduser(url)
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(recipe_path, url))
                url = url_path(url)
            else:
                if url.startswith('file:///~'):
                    url = 'file:///' + expanduser(url[8:]).replace('\\', '/')
            try:
                print("Downloading %s" % url)
                with LoggingContext():
                    download(url, path)
            except CondaHTTPError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
            else:
                print("Success")
                break
        else:  # no break
            raise RuntimeError("Could not download %s" % url)

    hashed = None
    for tp in ('md5', 'sha1', 'sha256'):
        if tp in source_dict:
            expected_hash = source_dict[tp]
            hashed = hashsum_file(path, tp)
            if expected_hash != hashed:
                raise RuntimeError("%s mismatch: '%s' != '%s'" %
                                   (tp.upper(), hashed, expected_hash))
            break

    # this is really a fallback.  If people don't provide the hash, we still need to prevent
    #    collisions in our source cache, but the end user will get no benefit from the cache.
    if not hash_added:
        if not hashed:
            hashed = hashsum_file(path, 'sha256')
        dest_path = append_hash_to_fn(path, hashed)
        os.rename(path, dest_path)
        path = dest_path

    return path