Example #1
def get_repository_info(recipe_path):
    """This tries to get information about where a recipe came from.  This is different
    from the source - you can have a recipe in svn that gets source via git."""
    try:
        if exists(join(recipe_path, ".git")):
            origin = check_output_env(
                ["git", "config", "--get", "remote.origin.url"],
                cwd=recipe_path)
            rev = check_output_env(["git", "rev-parse", "HEAD"],
                                   cwd=recipe_path)
            return "Origin {}, commit {}".format(origin, rev)
        elif isdir(join(recipe_path, ".hg")):
            origin = check_output_env(["hg", "paths", "default"],
                                      cwd=recipe_path)
            rev = check_output_env(["hg", "id"], cwd=recipe_path).split()[0]
            return "Origin {}, commit {}".format(origin, rev)
        elif isdir(join(recipe_path, ".svn")):
            info = check_output_env(["svn", "info"], cwd=recipe_path)
            info = info.decode(
                "utf-8"
            )  # Py3 returns a byte string, but re needs unicode or str.
            server = re.search("Repository Root: (.*)$", info,
                               flags=re.M).group(1)
            revision = re.search("Revision: (.*)$", info, flags=re.M).group(1)
            return "{}, Revision {}".format(server, revision)
        else:
            return "{}, last modified {}".format(
                recipe_path,
                time.ctime(os.path.getmtime(join(recipe_path, "meta.yaml"))))
    except CalledProcessError:
        get_logger(__name__).debug("Failed to checkout source in " +
                                   recipe_path)
        return "{}, last modified {}".format(
            recipe_path,
            time.ctime(os.path.getmtime(join(recipe_path, "meta.yaml"))))
Example #2
def _inspect_linkages_this(filename, sysroot='', arch='native'):
    '''Inspect the dynamic linkages of a single binary.

    :param filename: path of the binary to inspect
    :param sysroot: sysroot prefix trimmed from resolved library paths
    :param arch: target architecture; 'native' resolves to the build machine's own
    :return: (uniqueness_key, orig_names, resolved_names)
    '''

    if not os.path.exists(filename):
        return None, [], []
    sysroot = _trim_sysroot(sysroot)
    arch = _get_arch_if_native(arch)
    with open(filename, 'rb') as f:
        # TODO :: Problems here:
        # TODO :: 1. macOS can modify RPATH for children in each .so
        # TODO :: 2. Linux can identify the program interpreter which can change the default_paths
        try:
            cf = codefile(ReadCheckWrapper(f), arch)
        except IncompleteRead:
            # the file was incomplete, can occur if a package ships a test file
            # which looks like an ELF file but is not.  Orange3 does this.
            get_logger(__name__).warning('problems inspecting linkages for {}'.format(filename))
            return None, [], []
        dirname = os.path.dirname(filename)
        results = cf.get_resolved_shared_libraries(dirname, dirname, sysroot)
        if not results:
            return cf.uniqueness_key(), [], []
        orig_names, resolved_names, _, _in_sysroot = map(list, zip(*results))
        return cf.uniqueness_key(), orig_names, resolved_names
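
A rough, self-contained stand-in for the ReadCheckWrapper/IncompleteRead pair used above (the real ones live in conda-build's helpers; here we only assume the wrapper's job is to abort on short reads):

import io

class IncompleteRead(Exception):
    pass

class ReadCheckWrapper:
    """Wrap a file object and raise if read() returns fewer bytes than requested."""
    def __init__(self, fobj):
        self._fobj = fobj

    def read(self, n=-1):
        data = self._fobj.read(n)
        if n >= 0 and len(data) != n:
            raise IncompleteRead('expected {} bytes, got {}'.format(n, len(data)))
        return data

    def __getattr__(self, name):
        return getattr(self._fobj, name)

f = ReadCheckWrapper(io.BytesIO(b'\x7fELF'))
f.read(4)       # fine - four bytes are available
try:
    f.read(16)  # truncated 'ELF'-looking file -> IncompleteRead, as handled above
except IncompleteRead as exc:
    print(exc)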
Example #3
def get_repository_info(recipe_path):
    """This tries to get information about where a recipe came from.  This is different
    from the source - you can have a recipe in svn that gets source via git."""
    try:
        if exists(join(recipe_path, ".git")):
            origin = check_output_env(["git", "config", "--get", "remote.origin.url"],
                                      cwd=recipe_path)
            rev = check_output_env(["git", "rev-parse", "HEAD"], cwd=recipe_path)
            return "Origin {}, commit {}".format(origin, rev)
        elif isdir(join(recipe_path, ".hg")):
            origin = check_output_env(["hg", "paths", "default"], cwd=recipe_path)
            rev = check_output_env(["hg", "id"], cwd=recipe_path).split()[0]
            return "Origin {}, commit {}".format(origin, rev)
        elif isdir(join(recipe_path, ".svn")):
            info = check_output_env(["svn", "info"], cwd=recipe_path)
            server = re.search("Repository Root: (.*)$", info, flags=re.M).group(1)
            revision = re.search("Revision: (.*)$", info, flags=re.M).group(1)
            return "{}, Revision {}".format(server, revision)
        else:
            return "{}, last modified {}".format(recipe_path,
                                             time.ctime(os.path.getmtime(
                                                 join(recipe_path, "meta.yaml"))))
    except CalledProcessError:
        get_logger(__name__).debug("Failed to checkout source in " + recipe_path)
        return "{}, last modified {}".format(recipe_path,
                                             time.ctime(os.path.getmtime(
                                                 join(recipe_path, "meta.yaml"))))
Example #4
def _inspect_linkages_this(filename, sysroot='', arch='native'):
    '''Inspect the dynamic linkages of a single binary.

    :param filename: path of the binary to inspect
    :param sysroot: sysroot prefix trimmed from resolved library paths
    :param arch: target architecture; 'native' resolves to the build machine's own
    :return: (uniqueness_key, orig_names, resolved_names)
    '''

    if not os.path.exists(filename):
        return None, [], []
    sysroot = _trim_sysroot(sysroot)
    arch = _get_arch_if_native(arch)
    with open(filename, 'rb') as f:
        # TODO :: Problems here:
        # TODO :: 1. macOS can modify RPATH for children in each .so
        # TODO :: 2. Linux can identify the program interpreter which can change the default_paths
        try:
            cf = codefile(ReadCheckWrapper(f), arch)
        except IncompleteRead:
            # the file was incomplete, can occur if a package ships a test file
            # which looks like an ELF file but is not.  Orange3 does this.
            get_logger(__name__).warning(
                f'problems inspecting linkages for {filename}')
            return None, [], []
        dirname = os.path.dirname(filename)
        results = cf.get_resolved_shared_libraries(dirname, dirname, sysroot)
        if not results:
            return cf.uniqueness_key(), [], []
        orig_names, resolved_names, _, _in_sysroot = map(list, zip(*results))
        return cf.uniqueness_key(), orig_names, resolved_names
Example #5
def verify(path):
    """
    Verify the file `path`, with signature `path`.sig, against the key
    found under ~/.conda/keys/<key_name>.pub.  This function returns:
      - True, if the signature is valid
      - False, if the signature is invalid
    It logs an error and returns False when the signature file or the public
    key does not exist.
    """
    log = utils.get_logger(__name__)
    sig_path = path + '.sig'
    if not isfile(sig_path):
        log.error("signature does not exist: %s", sig_path)
        return False
    with open(sig_path) as fi:
        key_name, sig = fi.read().split()
    if key_name not in KEYS:
        key_path = join(KEYS_DIR, '%s.pub' % key_name)
        if not isfile(key_path):
            log.error("public key does not exist: %s", key_path)
            return False
        with open(key_path) as f:
            KEYS[key_name] = RSA.importKey(f.read())
    key = KEYS[key_name]
    verifier = PKCS1_PSS.new(key)
    return verifier.verify(hash_file(path), base64.b64decode(sig))
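
For reference, the PKCS#1 PSS round trip that verify() performs can be sketched with the same legacy PyCrypto-style API, generating a throwaway key instead of reading one from ~/.conda/keys (a demo under those assumptions, not conda-build's signing code):

import base64

from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_PSS

key = RSA.generate(2048)           # throwaway key pair for the demo
h = SHA256.new(b'file contents')   # stands in for hash_file(path)
sig = base64.b64encode(PKCS1_PSS.new(key).sign(h))

verifier = PKCS1_PSS.new(key.publickey())
print(verifier.verify(SHA256.new(b'file contents'), base64.b64decode(sig)))  # True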
Example #6
def test_logger_config_from_file(testing_workdir, caplog, capfd, mocker):
    test_file = os.path.join(testing_workdir, 'build_log_config.yaml')
    with open(test_file, 'w') as f:
        f.write("""
version: 1
formatters:
  simple:
    format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
handlers:
  console:
    class: logging.StreamHandler
    level: WARN
    formatter: simple
    stream: ext://sys.stdout
loggers:
  {}:
    level: WARN
    handlers: [console]
    propagate: no
root:
  level: DEBUG
  handlers: [console]
""".format(__name__))
    cc_conda_build = mocker.patch.object(utils, 'cc_conda_build')
    cc_conda_build.get.return_value = test_file
    log = utils.get_logger(__name__)
    # the config file above sets this logger to WARN and routes its output to stdout
    log.warn('test message')
    # output should have gone to stdout according to config above.
    out, err = capfd.readouterr()
    assert 'test message' in out
    # make sure that it is not in stderr - this is testing override of defaults.
    assert 'test message' not in err
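
Under the hood, a YAML file like the one written above is applied with logging.config.dictConfig; a minimal sketch (assuming PyYAML is installed):

import logging.config

import yaml

config_text = """
version: 1
handlers:
  console:
    class: logging.StreamHandler
    stream: ext://sys.stdout
root:
  level: WARN
  handlers: [console]
"""

logging.config.dictConfig(yaml.safe_load(config_text))
logging.getLogger('demo').warning('goes to stdout')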
Example #7
def tests_failed(package_or_metadata, move_broken, broken_dir, config):
    """
    Causes conda to exit if any of the given package's tests failed.

    :param m: Package's metadata
    :type m: Metadata
    """
    if not isdir(broken_dir):
        os.makedirs(broken_dir)

    if hasattr(package_or_metadata, "config"):
        pkg = bldpkg_path(package_or_metadata)
    else:
        pkg = package_or_metadata
    dest = join(broken_dir, os.path.basename(pkg))

    if move_broken:
        log = utils.get_logger(__name__)
        try:
            shutil.move(pkg, dest)
            log.warn(
                "Tests failed for %s - moving package to %s"
                % (os.path.basename(pkg), broken_dir)
            )
        except OSError:
            pass
        update_index(
            os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1
        )
    sys.exit("TESTS FAILED: " + os.path.basename(pkg))
Example #9
def find_apple_cctools_executable(name, build_prefix, nofail=False):
    tools = find_preferably_prefixed_executable(name, build_prefix, all_matches=True)
    for tool in tools:
        try:
            if '/usr/bin' in tool:
                with open(tool, 'rb') as f:
                    s = f.read()
                if s.find(b'usr/lib/libxcselect.dylib') != -1:
                    # We ask xcrun.
                    try:
                        tool_xcr = check_output(['xcrun', '-find', name], stderr=STDOUT).decode('utf-8').splitlines()[0]
                    except Exception as e:
                        log = utils.get_logger(__name__)
                        log.error("ERROR :: Found `{}` but is is an Apple Xcode stub executable\n"
                                  "and it returned an error:\n{}".format(tool, e.output))
                        raise e
                    tool = tool_xcr
                    if os.path.exists(tool):
                        return tool
        except Exception as _:  # noqa
            print("ERROR :: Failed to run `{}`.  Please use `conda` to install `cctools` into your base environment.\n"
                  "         An option on macOS is to install `Xcode` or `Command Line Tools for Xcode`."
                  .format(tool))
            sys.exit(1)
        return tool
Example #10
def _trim_None_strings(meta_dict):
    log = utils.get_logger(__name__)
    for key, value in meta_dict.items():
        if hasattr(value, 'keys'):
            meta_dict[key] = _trim_None_strings(value)
        elif (value and hasattr(value, '__iter__')) or isinstance(value, string_types):
            if isinstance(value, string_types):
                meta_dict[key] = None if 'None' in value else value
            else:
                # support lists of dicts (homogeneous)
                keep = []
                if hasattr(value[0], 'keys'):
                    for d in value:
                        trimmed_dict = _trim_None_strings(d)
                        if trimmed_dict:
                            keep.append(trimmed_dict)
                # support lists of strings (homogeneous)
                else:
                    keep = [i for i in value if 'None' not in i]
                meta_dict[key] = keep
        else:
            log.debug("found unrecognized data type in dictionary: {0}, type: {1}".format(value,
                                                                                    type(value)))
    trim_empty_keys(meta_dict)
    return meta_dict
Example #11
def get_dict(config, m=None, prefix=None, for_env=True):
    log = utils.get_logger(__name__)
    if not prefix:
        prefix = config.host_prefix

    # conda-build specific vars
    d = conda_build_vars(prefix, config)

    # languages
    d.update(python_vars(config))
    d.update(perl_vars(config))
    d.update(lua_vars(config))

    if m:
        d.update(meta_vars(m, config))

    # system
    d.update(system_vars(d, prefix, config))

    # features
    d.update({feat.upper(): str(int(value)) for feat, value in feature_list})

    for k, v in config.variant.items():
        if not for_env or k.upper() not in d:
            d[k] = v
        else:
            log.debug(
                "Omitting variable %s from env dictionary (already exists)", k)

    return d
Example #12
def get_install_actions(prefix, index, specs, config, retries=0):
    log = utils.get_logger(__name__)
    if config.verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    else:
        capture = utils.capture
    actions = {'LINK': []}
    specs = [_ensure_valid_spec(spec) for spec in specs]
    if specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with capture():
            try:
                actions = plan.install_actions(prefix, index, specs)
            except NoPackagesFoundError as exc:
                raise DependencyNeedsBuildingError(exc)
            except (SystemExit, PaddingError, LinkError,
                    DependencyNeedsBuildingError, CondaError,
                    AssertionError) as exc:
                if 'lock' in str(exc):
                    log.warn(
                        "failed to get install actions, retrying.  exception was: %s",
                        str(exc))
                elif ('requires a minimum conda version' in str(exc)
                      or 'link a source that does not' in str(exc)
                      or isinstance(exc, AssertionError)):
                    locks = utils.get_conda_operation_locks(config)
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        pkg_dir = str(exc)
                        folder = 0
                        while os.path.dirname(
                                pkg_dir) not in pkgs_dirs and folder < 20:
                            pkg_dir = os.path.dirname(pkg_dir)
                            folder += 1
                        log.warn(
                            "I think conda ended up with a partial extraction for %s.  "
                            "Removing the folder and retrying", pkg_dir)
                        if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                if retries < config.max_env_retry:
                    log.warn(
                        "failed to get install actions, retrying.  exception was: %s",
                        str(exc))
                    actions = get_install_actions(prefix,
                                                  index,
                                                  specs,
                                                  config,
                                                  retries=retries + 1)
                else:
                    log.error(
                        "Failed to get install actions, max retries exceeded.")
                    raise
        if config.disable_pip:
            actions['LINK'] = [
                spec for spec in actions['LINK'] if not spec.startswith('pip-')
                and not spec.startswith('setuptools-')
            ]
    return actions
Example #13
def get_git_info(repo, config):
    """
    Given a repo to a git repo, return a dictionary of:
      GIT_DESCRIBE_TAG
      GIT_DESCRIBE_NUMBER
      GIT_DESCRIBE_HASH
      GIT_FULL_HASH
      GIT_BUILD_STR
    from the output of git describe.
    :return:
    """
    d = {}
    log = utils.get_logger(__name__)

    if config.verbose:
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stderr = FNULL
        log.setLevel(logging.ERROR)

    # grab information from describe
    env = os.environ.copy()
    env['GIT_DIR'] = repo
    keys = ["GIT_DESCRIBE_TAG", "GIT_DESCRIBE_NUMBER", "GIT_DESCRIBE_HASH"]

    try:
        output = utils.check_output_env(
            ["git", "describe", "--tags", "--long", "HEAD"],
            env=env,
            cwd=os.path.dirname(repo),
            stderr=stderr).splitlines()[0]
        output = output.decode('utf-8')

        parts = output.rsplit('-', 2)
        if len(parts) == 3:
            d.update(dict(zip(keys, parts)))

        # get the _full_ hash of the current HEAD
        output = utils.check_output_env(["git", "rev-parse", "HEAD"],
                                        env=env,
                                        cwd=os.path.dirname(repo),
                                        stderr=stderr).splitlines()[0]
        output = output.decode('utf-8')

        d['GIT_FULL_HASH'] = output
        # set up the build string
        if "GIT_DESCRIBE_NUMBER" in d and "GIT_DESCRIBE_HASH" in d:
            d['GIT_BUILD_STR'] = '{}_{}'.format(d["GIT_DESCRIBE_NUMBER"],
                                                d["GIT_DESCRIBE_HASH"])

        # issues on Windows with the next line of the command prompt being recorded here.
        assert not any("\n" in value for value in d.values())

    except subprocess.CalledProcessError as error:
        log.warn(
            "Error obtaining git information in get_git_info.  Error was: ")
        log.warn(str(error))
    return d
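
The rsplit('-', 2) in the try block is what carves `git describe --tags --long` output into the three GIT_DESCRIBE_* fields; rsplit is used because the tag itself may contain dashes:

output = 'v1.4.2-7-g1a2b3c4'  # sample `git describe --tags --long` output
keys = ['GIT_DESCRIBE_TAG', 'GIT_DESCRIBE_NUMBER', 'GIT_DESCRIBE_HASH']
print(dict(zip(keys, output.rsplit('-', 2))))
# {'GIT_DESCRIBE_TAG': 'v1.4.2', 'GIT_DESCRIBE_NUMBER': '7', 'GIT_DESCRIBE_HASH': 'g1a2b3c4'}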
Example #14
def get_build_index(config, subdir, clear_cache=False, omit_defaults=False):
    global local_index_timestamp
    global cached_index
    log = utils.get_logger(__name__)
    mtime = 0

    if config.output_folder:
        output_folder = config.output_folder
    else:
        output_folder = os.path.dirname(config.bldpkgs_dir)

    # check file modification time - this is the age of our index.
    index_file = os.path.join(output_folder, subdir, 'repodata.json')
    if os.path.isfile(index_file):
        mtime = os.path.getmtime(index_file)

    if not os.path.isfile(index_file) or mtime > local_index_timestamp:
        log.debug(
            "Building new index for subdir '{}' with channels {}, condarc channels "
            "= {}".format(subdir, config.channel_urls, not omit_defaults))
        # priority: local by croot (can vary), then channels passed as args,
        #     then channels from config.
        if config.debug:
            log_context = partial(utils.LoggingContext, logging.DEBUG)
        elif config.verbose:
            log_context = partial(utils.LoggingContext, logging.INFO)
        else:
            log_context = partial(utils.LoggingContext, logging.CRITICAL + 1)

        urls = list(config.channel_urls)
        if os.path.isdir(output_folder):
            urls.insert(0, url_path(output_folder))
        ensure_valid_channel(output_folder, subdir, config)

        # silence output from conda about fetching index files
        with log_context():
            with utils.capture():
                # replace noarch with native subdir - this ends up building an index with both the
                #      native content and the noarch content.
                if subdir == 'noarch':
                    subdir = conda_interface.subdir
                try:
                    cached_index = get_index(channel_urls=urls,
                                             prepend=not omit_defaults,
                                             use_local=True,
                                             use_cache=False,
                                             platform=subdir)
                # HACK: defaults does not have the many subfolders we support.  Omit it and
                #          try again.
                except CondaHTTPError:
                    if 'defaults' in urls:
                        urls.remove('defaults')
                    cached_index = get_index(channel_urls=urls,
                                             prepend=omit_defaults,
                                             use_local=True,
                                             use_cache=False,
                                             platform=subdir)
        local_index_timestamp = mtime
    return cached_index, local_index_timestamp
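
utils.LoggingContext, used above to silence conda's index chatter, is conda-build's own helper; a simplified stand-in that only adjusts the root logger's level for the duration of a with-block might look like:

import contextlib
import logging

@contextlib.contextmanager
def logging_context(level):
    # temporarily raise the root logger's threshold, then restore it
    root = logging.getLogger()
    old_level = root.level
    root.setLevel(level)
    try:
        yield
    finally:
        root.setLevel(old_level)

with logging_context(logging.CRITICAL + 1):
    logging.getLogger('noisy').error('suppressed while inside the context')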
Example #15
def get_git_info(repo, debug):
    """
    Given a repo to a git repo, return a dictionary of:
      GIT_DESCRIBE_TAG
      GIT_DESCRIBE_NUMBER
      GIT_DESCRIBE_HASH
      GIT_FULL_HASH
      GIT_BUILD_STR
    from the output of git describe.
    :return:
    """
    d = {}
    log = utils.get_logger(__name__)

    if debug:
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stderr = FNULL

    # grab information from describe
    env = os.environ.copy()
    env['GIT_DIR'] = repo
    keys = ["GIT_DESCRIBE_TAG", "GIT_DESCRIBE_NUMBER", "GIT_DESCRIBE_HASH"]

    try:
        output = utils.check_output_env(["git", "describe", "--tags", "--long", "HEAD"],
                                        env=env, cwd=os.path.dirname(repo),
                                        stderr=stderr).splitlines()[0]
        output = output.decode('utf-8')
        parts = output.rsplit('-', 2)
        if len(parts) == 3:
            d.update(dict(zip(keys, parts)))
    except subprocess.CalledProcessError:
        log.debug("Failed to obtain git tag information.  Are you using annotated tags?")

    try:
        # get the _full_ hash of the current HEAD
        output = utils.check_output_env(["git", "rev-parse", "HEAD"],
                                         env=env, cwd=os.path.dirname(repo),
                                         stderr=stderr).splitlines()[0]
        output = output.decode('utf-8')

        d['GIT_FULL_HASH'] = output
    except subprocess.CalledProcessError as error:
        log.debug("Error obtaining git commit information.  Error was: ")
        log.debug(str(error))

    # set up the build string
    if "GIT_DESCRIBE_NUMBER" in d and "GIT_DESCRIBE_HASH" in d:
        d['GIT_BUILD_STR'] = '{}_{}'.format(d["GIT_DESCRIBE_NUMBER"],
                                            d["GIT_DESCRIBE_HASH"])

    # issues on Windows with the next line of the command prompt being recorded here.
    assert not any("\n" in value for value in d.values())
    return d
Example #16
def try_apply_patch(patch, patch_args, cwd, stdout, stderr):
    # An old reference: https://unix.stackexchange.com/a/243748/34459
    #
    # I am worried that '--ignore-whitespace' may be destructive. If so we should
    # avoid passing it, particularly in the initial (most likely to succeed) calls.
    #
    # From here on I define a 'native' patch as one which has:
    # 1. LF for the patch block metadata.
    # 2. CRLF or LF for the actual patched lines matching those of the source lines.
    #
    # Calls to a raw 'patch' are destructive in various ways:
    # 1. It leaves behind .rej and .orig files
    # 2. If you pass it a patch with incorrect CRLF changes and do not pass --binary and
    #    if any of those blocks *can* be applied, then the whole file gets written out with
    #    LF.  This cannot be reversed either; the text changes will be reversed but not
    #    line-feed changes (since all line-endings get changed, not just those of the
    #    patched lines)
    # 3. If patching fails, the bits that succeeded remain, so patching is not at all
    #    atomic.
    #
    # Still, we do our best to mitigate all of this as follows:
    # 1. We use --dry-run to test for applicability first.
    # 2. We check for native application of a native patch (--binary, without --ignore-whitespace)
    #
    # Some may bemoan the loss of patch failure artifacts, but it is fairly random which
    # patch and patch attempt they apply to, so their informational value is low; besides
    # that, they are ugly.
    #
    import tempfile
    temp_name = os.path.join(tempfile.gettempdir(),
                             next(tempfile._get_candidate_names()))
    base_patch_args = ['--no-backup-if-mismatch', '--batch'] + patch_args + ['-r', temp_name]
    log = get_logger(__name__)
    try:
        try_patch_args = base_patch_args[:]
        try_patch_args.append('--dry-run')
        log.debug("dry-run applying with\n{} {}".format(patch, try_patch_args))
        check_call_env([patch] + try_patch_args, cwd=cwd, stdout=stdout, stderr=stderr)
        # You can use this to pretend the patch failed so as to test reversal!
        # raise CalledProcessError(-1, ' '.join([patch] + patch_args))
    except Exception as e:
        raise e
    else:
        check_call_env([patch] + base_patch_args, cwd=cwd, stdout=stdout, stderr=stderr)
    finally:
        if os.path.exists(temp_name):
            os.unlink(temp_name)
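
The dry-run-first strategy described in the comments boils down to a small helper; a sketch assuming GNU patch is on PATH (conda-build's real calls go through check_call_env as above):

import subprocess

def patch_applies(patch_file, cwd):
    # ask `patch` whether the patch would apply cleanly, without touching any files
    cmd = ['patch', '-p1', '--dry-run', '-i', patch_file]
    return subprocess.call(cmd, cwd=cwd) == 0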
Example #17
def test_installable(channel='defaults'):
    success = True
    log = get_logger(__name__)
    has_py = re.compile(r'py(\d)(\d)')
    for platform in ['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64']:
        log.info("######## Testing platform %s ########", platform)
        channels = [channel]
        index = get_index(channel_urls=channels,
                          prepend=False,
                          platform=platform)
        for _, rec in iteritems(index):
            # If we give channels at the command line, only look at
            # packages from those channels (not defaults).
            if channel != 'defaults' and rec.get('schannel',
                                                 'defaults') == 'defaults':
                continue
            name = rec['name']
            if name in {'conda', 'conda-build'}:
                # conda can only be installed in the root environment
                continue
            if name.endswith('@'):
                # this is a 'virtual' feature record that conda adds to the index for the solver
                # and should be ignored here
                continue
            # Don't fail just because the package is a different version of Python
            # than the default.  We should probably check depends rather than the
            # build string.
            build = rec['build']
            match = has_py.search(build)
            assert match if 'py' in build else True, build
            if match:
                additional_packages = [
                    'python=%s.%s' % (match.group(1), match.group(2))
                ]
            else:
                additional_packages = []

            version = rec['version']
            log.info('Testing %s=%s', name, version)

            try:
                install_steps = check_install([name + '=' + version] +
                                              additional_packages,
                                              channel_urls=channels,
                                              prepend=False,
                                              platform=platform)
                success &= bool(install_steps)
            except KeyboardInterrupt:
                raise
            # sys.exit raises an exception that doesn't subclass from Exception
            except BaseException as e:
                success = False
                log.error("FAIL: %s %s on %s with %s (%s)", name, version,
                          platform, additional_packages, e)
    return success
Example #18
def get_build_index(config, subdir, clear_cache=False, omit_defaults=False):
    log = utils.get_logger(__name__)
    log.debug(
        "Building new index for subdir '{}' with channels {}, condarc channels = {}"
        .format(subdir, config.channel_urls, not omit_defaults))
    # priority: local by croot (can vary), then channels passed as args,
    #     then channels from config.
    if config.verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    else:
        capture = utils.capture
    if config.debug:
        log_context = partial(utils.LoggingContext, logging.DEBUG)
    elif config.verbose:
        log_context = partial(utils.LoggingContext, logging.INFO)
    else:
        log_context = partial(utils.LoggingContext, logging.CRITICAL + 1)

    # Note on conda and indexes:
    #    get_index is unfortunately much more stateful than simply returning an index.
    #    You cannot run get_index on one set of channels, and then later append a different
    #    index from a different set of channels - conda has some other state that it is loading
    #    and your second get_index will invalidate results from the first.   =(

    # global CURRENT_INDEX
    # if CURRENT_INDEX.get(subdir) and not clear_cache:
    #     index = CURRENT_INDEX[subdir]
    # else:
    urls = list(config.channel_urls)
    if os.path.isdir(config.croot):
        urls.insert(0, url_path(config.croot))
    ensure_valid_channel(config.croot, subdir, config)

    # silence output from conda about fetching index files
    with log_context():
        with capture():
            try:
                index = get_index(channel_urls=urls,
                                  prepend=not omit_defaults,
                                  use_local=True,
                                  use_cache=False,
                                  platform=subdir)
            # HACK: defaults does not have the many subfolders we support.  Omit it and try again.
            except CondaHTTPError:
                if 'defaults' in urls:
                    urls.remove('defaults')
                index = get_index(channel_urls=urls,
                                  prepend=omit_defaults,
                                  use_local=True,
                                  use_cache=False,
                                  platform=subdir)
        # CURRENT_INDEX[subdir] = index or {}
    return index
Example #19
def __init__(self, file):
    self.hdr, = struct.unpack(BIG_ENDIAN + 'L', file.read(4))
    self.dt_needed = []
    self.dt_rpath = []
    if self.hdr != ELF_HDR:
        return
    bitness, = struct.unpack(LITTLE_ENDIAN + 'B', file.read(1))
    bitness = 32 if bitness == 1 else 64
    sz_ptr = int(bitness / 8)
    ptr_type = 'Q' if sz_ptr == 8 else 'L'
    self.bitness = bitness
    self.sz_ptr = sz_ptr
    self.ptr_type = ptr_type
    endian, = struct.unpack(LITTLE_ENDIAN + 'B', file.read(1))
    endian = LITTLE_ENDIAN if endian == 1 else BIG_ENDIAN
    self.endian = endian
    self.version, = struct.unpack(endian + 'B', file.read(1))
    self.osabi, = struct.unpack(endian + 'B', file.read(1))
    self.abiver, = struct.unpack(endian + 'B', file.read(1))
    struct.unpack(endian + 'B' * 7, file.read(7))  # skip the e_ident padding bytes
    self.type, = struct.unpack(endian + 'H', file.read(2))
    self.machine, = struct.unpack(endian + 'H', file.read(2))
    # e_version; overwrites the e_ident version byte read above
    self.version, = struct.unpack(endian + 'L', file.read(4))
    self.entry, = struct.unpack(endian + ptr_type, file.read(sz_ptr))
    self.phoff, = struct.unpack(endian + ptr_type, file.read(sz_ptr))
    self.shoff, = struct.unpack(endian + ptr_type, file.read(sz_ptr))
    self.flags, = struct.unpack(endian + 'L', file.read(4))
    self.ehsize, = struct.unpack(endian + 'H', file.read(2))
    self.phentsize, = struct.unpack(endian + 'H', file.read(2))
    self.phnum, = struct.unpack(endian + 'H', file.read(2))
    self.shentsize, = struct.unpack(endian + 'H', file.read(2))
    self.shnum, = struct.unpack(endian + 'H', file.read(2))
    self.shstrndx, = struct.unpack(endian + 'H', file.read(2))
    loc = file.tell()
    if loc != self.ehsize:
        get_logger(__name__).warning(
            f'file.tell()={loc} != ehsize={self.ehsize}')
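
The field-by-field struct.unpack pattern above can be exercised against a synthetic header; this small sketch decodes just the ELF identification bytes from an in-memory buffer:

import io
import struct

# hand-crafted e_ident (16 bytes) of a little-endian, 64-bit ELF file
blob = b'\x7fELF' + b'\x02\x01\x01\x00' + b'\x00' * 8
f = io.BytesIO(blob)

magic, = struct.unpack('>L', f.read(4))  # the magic is read big-endian
ei_class, ei_data, ei_version = struct.unpack('<BBB', f.read(3))
print(hex(magic),
      '64-bit' if ei_class == 2 else '32-bit',
      'little-endian' if ei_data == 1 else 'big-endian')
# 0x7f454c46 64-bit little-endian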
Example #20
def expand_outputs(metadata_tuples, index):
    """Obtain all metadata objects for all outputs from recipe.  Useful for ouptutting paths."""
    expanded_outputs = {}
    for (_m, download, reparse) in metadata_tuples:
        for (output_dict, m) in _m.get_output_metadata_set():
            if output_dict.get('type') != 'wheel':
                try:
                    m = finalize_metadata(m, index)
                except DependencyNeedsBuildingError:
                    log = utils.get_logger(__name__)
                    log.warn("Could not finalize metadata due to missing dependencies.  "
                                "If building, these should get built in order and it's OK to "
                                "ignore this message..")
                expanded_outputs[m.dist()] = (m, download, reparse)
    return list(expanded_outputs.values())
Example #21
def __init__(self, file):
    self.hdr, = struct.unpack(BIG_ENDIAN + 'L', file.read(4))
    self.dt_needed = []
    self.dt_rpath = []
    if self.hdr != ELF_HDR:
        return
    bitness, = struct.unpack(LITTLE_ENDIAN + 'B', file.read(1))
    bitness = 32 if bitness == 1 else 64
    sz_ptr = int(bitness / 8)
    ptr_type = 'Q' if sz_ptr == 8 else 'L'
    self.bitness = bitness
    self.sz_ptr = sz_ptr
    self.ptr_type = ptr_type
    endian, = struct.unpack(LITTLE_ENDIAN + 'B', file.read(1))
    endian = LITTLE_ENDIAN if endian == 1 else BIG_ENDIAN
    self.endian = endian
    self.version, = struct.unpack(endian + 'B', file.read(1))
    self.osabi, = struct.unpack(endian + 'B', file.read(1))
    self.abiver, = struct.unpack(endian + 'B', file.read(1))
    struct.unpack(endian + 'B' * 7, file.read(7))
    self.type, = struct.unpack(endian + 'H', file.read(2))
    self.machine, = struct.unpack(endian + 'H', file.read(2))
    self.version, = struct.unpack(endian + 'L', file.read(4))
    self.entry, = struct.unpack(endian + ptr_type, file.read(sz_ptr))
    self.phoff, = struct.unpack(endian + ptr_type, file.read(sz_ptr))
    self.shoff, = struct.unpack(endian + ptr_type, file.read(sz_ptr))
    self.flags, = struct.unpack(endian + 'L', file.read(4))
    self.ehsize, = struct.unpack(endian + 'H', file.read(2))
    self.phentsize, = struct.unpack(endian + 'H', file.read(2))
    self.phnum, = struct.unpack(endian + 'H', file.read(2))
    self.shentsize, = struct.unpack(endian + 'H', file.read(2))
    self.shnum, = struct.unpack(endian + 'H', file.read(2))
    self.shstrndx, = struct.unpack(endian + 'H', file.read(2))
    loc = file.tell()
    if loc != self.ehsize:
        get_logger(__name__).warning('file.tell()={} != ehsize={}'.format(loc, self.ehsize))
Example #22
def get_build_index(config, subdir, clear_cache=False, omit_defaults=False):
    log = utils.get_logger(__name__)
    log.debug("Building new index for subdir '{}' with channels {}, condarc channels = {}".format(
        subdir, config.channel_urls, not omit_defaults))
    # priority: local by croot (can vary), then channels passed as args,
    #     then channels from config.
    if config.debug:
        log_context = partial(utils.LoggingContext, logging.DEBUG)
    elif config.verbose:
        log_context = partial(utils.LoggingContext, logging.INFO)
    else:
        log_context = partial(utils.LoggingContext, logging.CRITICAL + 1)

    # Note on conda and indexes:
    #    get_index is unfortunately much more stateful than simply returning an index.
    #    You cannot run get_index on one set of channels, and then later append a different
    #    index from a different set of channels - conda has some other state that it is loading
    #    and your second get_index will invalidate results from the first.   =(

    # global CURRENT_INDEX
    # if CURRENT_INDEX.get(subdir) and not clear_cache:
    #     index = CURRENT_INDEX[subdir]
    # else:
    urls = list(config.channel_urls)
    if os.path.isdir(config.croot):
        urls.insert(0, url_path(config.croot))
    ensure_valid_channel(config.croot, subdir, config)

    # silence output from conda about fetching index files
    with log_context():
        with utils.capture():
            try:
                index = get_index(channel_urls=urls,
                                  prepend=not omit_defaults,
                                  use_local=True,
                                  use_cache=False,
                                  platform=subdir)
            # HACK: defaults does not have the many subfolders we support.  Omit it and try again.
            except CondaHTTPError:
                if 'defaults' in urls:
                    urls.remove('defaults')
                index = get_index(channel_urls=urls,
                                  prepend=omit_defaults,
                                  use_local=True,
                                  use_cache=False,
                                  platform=subdir)
        # CURRENT_INDEX[subdir] = index or {}
    return index
Example #23
def test_installable(channel='defaults'):
    success = True
    log = get_logger(__name__)
    has_py = re.compile(r'py(\d)(\d)')
    for platform in ['osx-64', 'linux-32', 'linux-64', 'win-32', 'win-64']:
        log.info("######## Testing platform %s ########", platform)
        channels = [channel]
        index = get_index(channel_urls=channels, prepend=False, platform=platform)
        for _, rec in iteritems(index):
            # If we give channels at the command line, only look at
            # packages from those channels (not defaults).
            if channel != 'defaults' and rec.get('schannel', 'defaults') == 'defaults':
                continue
            name = rec['name']
            if name in {'conda', 'conda-build'}:
                # conda can only be installed in the root environment
                continue
            if name.endswith('@'):
                # this is a 'virtual' feature record that conda adds to the index for the solver
                # and should be ignored here
                continue
            # Don't fail just because the package is a different version of Python
            # than the default.  We should probably check depends rather than the
            # build string.
            build = rec['build']
            match = has_py.search(build)
            assert match if 'py' in build else True, build
            if match:
                additional_packages = ['python=%s.%s' % (match.group(1), match.group(2))]
            else:
                additional_packages = []

            version = rec['version']
            log.info('Testing %s=%s', name, version)

            try:
                install_steps = check_install([name + '=' + version] + additional_packages,
                                              channel_urls=channels, prepend=False,
                                              platform=platform)
                success &= bool(install_steps)
            except KeyboardInterrupt:
                raise
            # sys.exit raises an exception that doesn't subclass from Exception
            except BaseException as e:
                success = False
                log.error("FAIL: %s %s on %s with %s (%s)", name, version,
                          platform, additional_packages, e)
    return success
Example #24
def git_info(src_dir, verbose=True, fo=None):
    ''' Print info about a Git repo. '''
    assert isdir(src_dir)

    git = external.find_executable('git')
    if not git:
        log = get_logger(__name__)
        log.warn(
            "git not installed in root environment.  Skipping recording of git info."
        )
        return

    if verbose:
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stderr = FNULL

    # Ensure to explicitly set GIT_DIR as some Linux machines will not
    # properly execute without it.
    env = os.environ.copy()
    env['GIT_DIR'] = join(src_dir, '.git')
    env = {str(key): str(value) for key, value in env.items()}
    for cmd, check_error in [('git log -n1', True),
                             ('git describe --tags --dirty', False),
                             ('git status', True)]:
        try:
            stdout = check_output_env(cmd.split(),
                                      stderr=stderr,
                                      cwd=src_dir,
                                      env=env)
        except CalledProcessError as e:
            if check_error:
                raise Exception("git error: %s" % str(e))
            continue  # command failed but is tolerated; skip the output handling below
        encoding = locale.getpreferredencoding()
        if not fo:
            encoding = sys.stdout.encoding
        encoding = encoding or 'utf-8'
        if hasattr(stdout, 'decode'):
            stdout = stdout.decode(encoding, 'ignore')
        if fo:
            fo.write(u'==> %s <==\n' % cmd)
            if verbose:
                fo.write(stdout + u'\n')
        else:
            if verbose:
                print(u'==> %s <==\n' % cmd)
                safe_print_unicode(stdout + u'\n')
Example #25
def build(m, stats={}):

    if m.skip():
        print(utils.get_skip_message(m))
        return {}

    log = utils.get_logger(__name__)

    with utils.path_prepended(m.config.build_prefix):
        env = environ.get_dict(m=m)

    env["CONDA_BUILD_STATE"] = "BUILD"
    if env_path_backup_var_exists:
        env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    m.output.sections["package"]["name"] = m.output.name
    env["PKG_NAME"] = m.get_value('package/name')

    src_dir = m.config.work_dir
    if isdir(src_dir):
        if m.config.verbose:
            print("source tree in:", src_dir)
    else:
        if m.config.verbose:
            print("no source - creating empty work folder")
        os.makedirs(src_dir)

    utils.rm_rf(m.config.info_dir)
    files_before_script = utils.prefix_files(prefix=m.config.host_prefix)

    with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f:
        f.write("\n".join(sorted(list(files_before_script))))
        f.write("\n")

    execute_build_script(m, src_dir, env)

    files_after_script = utils.prefix_files(prefix=m.config.host_prefix)

    files_difference = files_after_script - files_before_script

    if m.output.sections['build'].get('intermediate'):
        utils.rm_rf(m.config.host_prefix)
        return

    bundle_conda(m, files_before_script, env, m.output.sections['files'])
Example #26
def filter_by_key_value(variants, key, values, source_name):
    """variants is the exploded out list of dicts, with one value per key in each dict.
    key and values come from subsequent variants before they are exploded out."""
    reduced_variants = []
    if hasattr(values, 'keys'):
        reduced_variants = variants
    else:
        # break this out into a full loop so that we can show filtering output
        for variant in variants:
            if variant.get(key) and variant.get(key) in values:
                reduced_variants.append(variant)
            else:
                log = get_logger(__name__)
                log.debug('Filtering variant with key {key} not matching target value(s) '
                          '({tgt_vals}) from {source_name}, actual {actual_val}'.format(
                              key=key, tgt_vals=values, source_name=source_name,
                              actual_val=variant.get(key)))
    return reduced_variants
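
A quick usage sketch for the function above, stubbing conda-build's get_logger with the stdlib so it runs standalone when pasted after the definition:

import logging
get_logger = logging.getLogger  # stand-in for conda_build.utils.get_logger

variants = [{'python': '3.8'}, {'python': '3.9'}, {'python': '3.10'}]
print(filter_by_key_value(variants, 'python', ['3.9', '3.10'], 'demo config'))
# [{'python': '3.9'}, {'python': '3.10'}]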
Example #28
def git_info(src_dir, verbose=True, fo=None):
    ''' Print info about a Git repo. '''
    assert isdir(src_dir)

    git = external.find_executable('git')
    if not git:
        log = get_logger(__name__)
        log.warn("git not installed in root environment.  Skipping recording of git info.")
        return

    if verbose:
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stderr = FNULL

    # Ensure to explicitly set GIT_DIR as some Linux machines will not
    # properly execute without it.
    env = os.environ.copy()
    env['GIT_DIR'] = join(src_dir, '.git')
    env = {str(key): str(value) for key, value in env.items()}
    for cmd, check_error in [
            ('git log -n1', True),
            ('git describe --tags --dirty', False),
            ('git status', True)]:
        try:
            stdout = check_output_env(cmd.split(), stderr=stderr, cwd=src_dir, env=env)
        except CalledProcessError as e:
            if check_error:
                raise Exception("git error: %s" % str(e))
            continue  # command failed but is tolerated; skip the output handling below
        encoding = locale.getpreferredencoding()
        if not fo:
            encoding = sys.stdout.encoding
        encoding = encoding or 'utf-8'
        if hasattr(stdout, 'decode'):
            stdout = stdout.decode(encoding, 'ignore')
        if fo:
            fo.write(u'==> %s <==\n' % cmd)
            if verbose:
                fo.write(stdout + u'\n')
        else:
            if verbose:
                print(u'==> %s <==\n' % cmd)
                safe_print_unicode(stdout + u'\n')
Example #29
def clean_pkg_cache(dist, config):
    locks = []

    conda_log_level = logging.WARN
    if config.debug:
        conda_log_level = logging.DEBUG

    _pkgs_dirs = pkgs_dirs[:1]
    if config.locking:
        locks = [
            utils.get_lock(folder, timeout=config.timeout)
            for folder in _pkgs_dirs
        ]
    with utils.LoggingContext(conda_log_level):
        with utils.try_acquire_locks(locks, timeout=config.timeout):
            rmplan = [
                'RM_EXTRACTED {0} local::{0}'.format(dist),
                'RM_FETCHED {0} local::{0}'.format(dist),
            ]
            execute_plan(rmplan)

            # Conda does not seem to do a complete cleanup sometimes.  This is supplemental.
            #   Conda's cleanup is still necessary - it keeps track of its own in-memory
            #   list of downloaded things.
            for folder in pkgs_dirs:
                try:
                    assert not os.path.exists(os.path.join(folder, dist))
                    assert not os.path.exists(
                        os.path.join(folder, dist + '.tar.bz2'))
                    for pkg_id in [dist, 'local::' + dist]:
                        assert pkg_id not in package_cache()
                except AssertionError:
                    log = utils.get_logger(__name__)
                    log.debug(
                        "Conda caching error: %s package remains in cache after removal",
                        dist)
                    log.debug("manually removing to compensate")
                    cache = package_cache()
                    keys = [key for key in cache.keys() if dist in key]
                    for pkg_id in keys:
                        if pkg_id in cache:
                            del cache[pkg_id]
                    for entry in glob(os.path.join(folder, dist + '*')):
                        utils.rm_rf(entry)
Example #30
def test_logger_filtering(caplog, capfd):
    import logging
    log = utils.get_logger(__name__, level=logging.DEBUG)
    log.debug('test debug message')
    log.info('test info message')
    log.info('test duplicate message')
    log.info('test duplicate message')
    log.warn('test warn message')
    log.error('test error message')
    out, err = capfd.readouterr()
    assert 'test debug message' in out
    assert 'test info message' in out
    assert 'test warn message' not in out
    assert 'test error message' not in out
    assert 'test debug message' not in err
    assert 'test info message' not in err
    assert 'test warn message' in err
    assert 'test error message' in err
    assert caplog.text.count('duplicate') == 1
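
The stdout/stderr split this test asserts comes from conda-build's logger setup; the routing itself can be reproduced with two stdlib handlers and a level filter (the duplicate-message filtering is conda-build-specific and omitted here):

import logging
import sys

class BelowWarningFilter(logging.Filter):
    # pass only records below WARNING so they land on stdout
    def filter(self, record):
        return record.levelno < logging.WARNING

log = logging.getLogger('routing-demo')
log.setLevel(logging.DEBUG)

stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.addFilter(BelowWarningFilter())
stderr_handler = logging.StreamHandler(sys.stderr)
stderr_handler.setLevel(logging.WARNING)
log.addHandler(stdout_handler)
log.addHandler(stderr_handler)

log.info('goes to stdout')
log.warning('goes to stderr')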
Example #32
def build_string_from_metadata(metadata):
    if metadata.meta.get('build', {}).get('string'):
        build_str = metadata.get_value('build/string')
    else:
        res = []
        log = utils.get_logger(__name__)

        build_pkg_names = [ms.name for ms in metadata.ms_depends('build')]
        # TODO: this is the bit that puts in strings like py27np111 in the filename.  It would be
        #    nice to get rid of this, since the hash supersedes that functionally, but not clear
        #    whether anyone's tools depend on this file naming right now.
        for s, names, places in (('py', 'python', 2), ('np', 'numpy', 2), ('pl', 'perl', 2),
                                 ('lua', 'lua', 2), ('r', ('r', 'r-base'), 3)):
            for ms in metadata.ms_depends('run'):
                for name in ensure_list(names):
                    if ms.name == name and name in build_pkg_names:
                        # only append numpy when it is actually pinned
                        if name == 'numpy' and (not hasattr(ms, 'version') or not ms.version):
                            continue
                        log.warn("Deprecation notice: computing build string (like pyXY).  This "
                                 "functionality has been replaced with the hash (h????), which"
                                 " can be readily inpsected with `conda inspect hash-inputs "
                                 "<pkg-name>`.  pyXY, npXYY and the like will go away in "
                                 "conda-build 4.0.  Please adapt any code that depends on filenames"
                                 " with pyXY, npXYY, etc.")
                        if metadata.noarch == name or (metadata.get_value('build/noarch_python') and
                                                    name == 'python'):
                            res.append(s)
                        else:
                            variant_version = metadata.config.variant.get(name, "")
                            res.append(''.join([s] + variant_version.split('.')[:places]))

        features = ensure_list(metadata.get_value('build/features', []))
        if res:
            res.append('_')
        if features:
            res.extend(('_'.join(features), '_'))
        res.append('{0}'.format(metadata.build_number() if metadata.build_number() else 0))
        build_str = "".join(res)
    return build_str
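
The pyXY/npXYY pieces assembled above come from splitting each variant version and keeping a fixed number of places; in isolation:

variant = {'python': '3.8.5', 'numpy': '1.17.3'}
parts = []
for prefix, name, places in (('py', 'python', 2), ('np', 'numpy', 2)):
    version = variant.get(name, '')
    if version:
        parts.append(prefix + ''.join(version.split('.')[:places]))
print(''.join(parts))  # py38np117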
Example #33
def clean_pkg_cache(dist, config):
    locks = []

    conda_log_level = logging.WARN
    if config.debug:
        conda_log_level = logging.DEBUG

    _pkgs_dirs = pkgs_dirs[:1]
    if config.locking:
        locks = [utils.get_lock(folder, timeout=config.timeout) for folder in _pkgs_dirs]
    with utils.LoggingContext(conda_log_level):
        with utils.try_acquire_locks(locks, timeout=config.timeout):
            rmplan = [
                'RM_EXTRACTED {0} local::{0}'.format(dist),
                'RM_FETCHED {0} local::{0}'.format(dist),
            ]
            execute_plan(rmplan)

            # Conda does not seem to do a complete cleanup sometimes.  This is supplemental.
            #   Conda's cleanup is still necessary - it keeps track of its own in-memory
            #   list of downloaded things.
            for folder in pkgs_dirs:
                try:
                    assert not os.path.exists(os.path.join(folder, dist))
                    assert not os.path.exists(os.path.join(folder, dist + '.tar.bz2'))
                    for pkg_id in [dist, 'local::' + dist]:
                        assert pkg_id not in package_cache()
                except AssertionError:
                    log = utils.get_logger(__name__)
                    log.debug("Conda caching error: %s package remains in cache after removal",
                              dist)
                    log.debug("manually removing to compensate")
                    cache = package_cache()
                    keys = [key for key in cache.keys() if dist in key]
                    for pkg_id in keys:
                        if pkg_id in cache:
                            del cache[pkg_id]
                    for entry in glob(os.path.join(folder, dist + '*')):
                        utils.rm_rf(entry)
Example #34
def get_hash_input(packages):
    log = get_logger(__name__)
    hash_inputs = {}
    for pkg in packages:
        pkgname = os.path.basename(pkg)[:-8]
        hash_inputs[pkgname] = {}
        hash_input = package_has_file(pkg, 'info/hash_input.json')
        if hash_input:
            hash_inputs[pkgname]['recipe'] = json.loads(hash_input.decode())
        else:
            # keep the dict so the 'files' key below can still be assigned
            hash_inputs[pkgname]['recipe'] = "<no hash_input.json in file>"
        hash_input_files = package_has_file(pkg, 'info/hash_input_files')
        hash_inputs[pkgname]['files'] = []
        if hash_input_files:
            for fname in hash_input_files.splitlines():
                if hasattr(fname, 'decode'):
                    fname = fname.decode()
                hash_inputs[pkgname]['files'].append('info/recipe/{}'.format(fname))
        else:
            log.warn('Package {} does not include recipe.  Full hash information is '
                     'not reproducible.'.format(pkgname))
    return hash_inputs
Example #35
def fix_permissions(files, prefix):
    print("Fixing permissions")
    for path in scandir(prefix):
        if path.is_dir():
            lchmod(path.path, 0o775)

    for f in files:
        path = os.path.join(prefix, f)
        st = os.lstat(path)
        old_mode = stat.S_IMODE(st.st_mode)
        new_mode = old_mode
        # broadcast execute
        if old_mode & stat.S_IXUSR:
            new_mode = new_mode | stat.S_IXGRP | stat.S_IXOTH
        # ensure user and group can write and all can read
        new_mode = new_mode | stat.S_IWUSR | stat.S_IWGRP | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH  # noqa
        if old_mode != new_mode:
            try:
                lchmod(path, new_mode)
            except (OSError, utils.PermissionError) as e:
                log = utils.get_logger(__name__)
                log.warn(str(e))
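
The mode arithmetic above is plain bit math on stat flags; for instance, a file that starts out as 0o700 ends up 0o775:

import stat

old_mode = 0o700
new_mode = old_mode
if old_mode & stat.S_IXUSR:  # owner can execute -> broadcast execute
    new_mode |= stat.S_IXGRP | stat.S_IXOTH
# ensure user and group can write and all can read
new_mode |= stat.S_IWUSR | stat.S_IWGRP | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
print(oct(new_mode))  # 0o775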
Example #37
def clean_pkg_cache(dist, config):
    locks = []

    conda_log_level = logging.WARN
    if config.debug:
        conda_log_level = logging.DEBUG

    with utils.LoggingContext(conda_log_level):
        locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config)
        with utils.try_acquire_locks(locks, timeout=config.timeout):
            rmplan = [
                'RM_EXTRACTED {0} local::{0}'.format(dist),
                'RM_FETCHED {0} local::{0}'.format(dist),
            ]
            execute_plan(rmplan)

            # Conda does not seem to do a complete cleanup sometimes.  This is supplemental.
            #   Conda's cleanup is still necessary - it keeps track of its own in-memory
            #   list of downloaded things.
            for folder in pkgs_dirs:
                if (os.path.exists(os.path.join(folder, dist))
                        or os.path.exists(
                            os.path.join(folder, dist + '.tar.bz2'))
                        or any(pkg_id in package_cache()
                               for pkg_id in [dist, 'local::' + dist])):
                    log = utils.get_logger(__name__)
                    log.debug(
                        "Conda caching error: %s package remains in cache after removal",
                        dist)
                    log.debug("manually removing to compensate")
                    cache = package_cache()
                    keys = [key for key in cache.keys() if dist in key]
                    for pkg_id in keys:
                        if pkg_id in cache:
                            del cache[pkg_id]

        # Note that this call acquires the relevant locks, so this must be called
        # outside the lock context above.
        remove_existing_packages(pkgs_dirs, [dist], config)
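
The defensive purge in the middle of this function reduces to "drop every
cache entry whose key mentions the dist"; a condensed sketch with a plain
dict standing in for conda's package_cache():

def purge_dist(cache, dist):
    """Drop every cache entry whose key mentions `dist` (illustrative only)."""
    for key in [k for k in cache if dist in k]:
        del cache[key]
    return cache

cache = {'local::mypkg-1.0-py37_0': object(), 'otherpkg-2.0-0': object()}
assert list(purge_dist(cache, 'mypkg-1.0-py37_0')) == ['otherpkg-2.0-0']
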
Example #38
def apply_patch(src_dir, path, config, git=None):
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    files, is_git_format = _get_patch_file_details(path)
    if git and is_git_format:
        # Prevents git from asking interactive questions,
        # also necessary to achieve sha1 reproducibility;
        # as is --committer-date-is-author-date. By this,
        # we mean a round-trip of git am/git format-patch
        # gives the same file.
        git_env = os.environ
        git_env['GIT_COMMITTER_NAME'] = 'conda-build'
        git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
        check_call_env([git, 'am', '--committer-date-is-author-date', path],
                       cwd=src_dir, stdout=None, env=git_env)
        config.git_commits_since_tag += 1
    else:
        print('Applying patch: %r' % path)
        patch = external.find_executable('patch', config.build_prefix)
        if patch is None:
            sys.exit("""\
        Error:
            Cannot use 'git' (not a git repo and/or patch) and did not find 'patch' in: %s
            You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
            or conda, m2-patch (Windows).
        """ % (os.pathsep.join(external.dir_paths)))
        patch_strip_level = _guess_patch_strip_level(files, src_dir)
        patch_args = ['-p%d' % patch_strip_level, '--ignore-whitespace', '-i', path]

        # line endings are a pain.
        # https://unix.stackexchange.com/a/243748/34459

        try:
            log = get_logger(__name__)
            log.info("Trying to apply patch as-is")
            check_call_env([patch] + patch_args, cwd=src_dir)
        except CalledProcessError:
            if sys.platform == 'win32':
                unix_ending_file = _ensure_unix_line_endings(path)
                patch_args[-1] = unix_ending_file
                try:
                    log.info("Applying unmodified patch failed.  "
                             "Convert to unix line endings and trying again.")
                    check_call_env([patch] + patch_args, cwd=src_dir)
                except:
                    log.info("Applying unix patch failed.  "
                             "Convert to CRLF line endings and trying again with --binary.")
                    patch_args.insert(0, '--binary')
                    win_ending_file = _ensure_win_line_endings(path)
                    patch_args[-1] = win_ending_file
                    try:
                        check_call_env([patch] + patch_args, cwd=src_dir)
                    finally:
                        if os.path.exists(win_ending_file):
                            os.remove(win_ending_file)  # clean up .patch_win file
                finally:
                    if os.path.exists(unix_ending_file):
                        os.remove(unix_ending_file)  # clean up .patch_unix file
            else:
                raise
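
_ensure_unix_line_endings is referenced above but not shown; a plausible
minimal stand-in (hypothetical, not the library's actual implementation)
rewrites the patch with LF endings and returns the new path:

def _ensure_unix_line_endings_sketch(path):
    """Write a copy of `path` with LF line endings; return the copy's path."""
    dest = path + '.patch_unix'
    with open(path, 'rb') as src, open(dest, 'wb') as out:
        for line in src:
            out.write(line.replace(b'\r\n', b'\n'))
    return dest
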
Example #39
def _chmod(filename, mode):
    try:
        os.chmod(filename, mode)
    except (OSError, utils.PermissionError) as e:
        log = utils.get_logger(__name__)
        log.warn(str(e))
Example #40
def download_to_cache(cache_folder, recipe_path, source_dict):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % cache_folder)
    if not isdir(cache_folder):
        os.makedirs(cache_folder)

    source_urls = source_dict['url']
    if not isinstance(source_urls, list):
        source_urls = [source_urls]
    unhashed_fn = fn = source_dict['fn'] if 'fn' in source_dict else basename(source_urls[0])
    hash_added = False
    for hash_type in ('md5', 'sha1', 'sha256'):
        if hash_type in source_dict:
            fn = append_hash_to_fn(fn, source_dict[hash_type])
            hash_added = True
            break
    else:
        log = get_logger(__name__)
        log.warn("No hash (md5, sha1, sha256) provided for {}.  Source download forced.  "
                 "Add hash to recipe to use source cache.".format(unhashed_fn))
    path = join(cache_folder, fn)
    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)

        for url in source_urls:
            if "://" not in url:
                if url.startswith('~'):
                    url = expanduser(url)
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(recipe_path, url))
                url = url_path(url)
            else:
                if url.startswith('file:///~'):
                    url = 'file:///' + expanduser(url[8:]).replace('\\', '/')
            try:
                print("Downloading %s" % url)
                with LoggingContext():
                    download(url, path)
            except CondaHTTPError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
                rm_rf(path)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
                rm_rf(path)
            else:
                print("Success")
                break
        else:  # no break
            rm_rf(path)
            raise RuntimeError("Could not download %s" % url)

    hashed = None
    for tp in ('md5', 'sha1', 'sha256'):
        if tp in source_dict:
            expected_hash = source_dict[tp]
            hashed = hashsum_file(path, tp)
            if expected_hash != hashed:
                rm_rf(path)
                raise RuntimeError("%s mismatch: '%s' != '%s'" %
                           (tp.upper(), hashed, expected_hash))
            break

    # this is really a fallback.  If people don't provide the hash, we still need to prevent
    #    collisions in our source cache, but the end user will get no benefit from the cache.
    if not hash_added:
        if not hashed:
            hashed = hashsum_file(path, 'sha256')
        dest_path = append_hash_to_fn(path, hashed)
        if not os.path.isfile(dest_path):
            shutil.move(path, dest_path)
        path = dest_path

    return path, unhashed_fn
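
hashsum_file and append_hash_to_fn come from conda-build's utilities; hedged
stand-ins are easy to sketch with hashlib (the names and the filename
convention here are assumptions, not the library's exact behavior):

import hashlib

def hashsum_file_sketch(path, hash_type='sha256', chunk=1 << 20):
    """Stream a file through hashlib and return its hex digest."""
    h = hashlib.new(hash_type)
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(chunk), b''):
            h.update(block)
    return h.hexdigest()

def append_hash_to_fn_sketch(fn, hash_value):
    """Insert the hash before the extension: pkg.tar.bz2 -> pkg_<hash>.tar.bz2."""
    base, dot, ext = fn.partition('.')
    return '{}_{}{}{}'.format(base, hash_value, dot, ext)
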
Example #41
def get_build_index(subdir, bldpkgs_dir, output_folder=None, clear_cache=False,
                    omit_defaults=False, channel_urls=None, debug=False, verbose=True,
                    locking=True, timeout=90):
    global local_index_timestamp
    global local_subdir
    global cached_index
    global cached_channels
    log = utils.get_logger(__name__)
    mtime = 0

    channel_urls = list(utils.ensure_list(channel_urls))

    if not output_folder:
        output_folder = os.path.dirname(bldpkgs_dir)

    # check file modification time - this is the age of our index.
    index_file = os.path.join(output_folder, subdir, 'repodata.json')
    if os.path.isfile(index_file):
        mtime = os.path.getmtime(index_file)

    if (clear_cache or
            not os.path.isfile(index_file) or
            local_subdir != subdir or
            mtime > local_index_timestamp or
            cached_channels != channel_urls):

        log.debug("Building new index for subdir '{}' with channels {}, condarc channels "
                  "= {}".format(subdir, channel_urls, not omit_defaults))
        # priority: local by croot (can vary), then channels passed as args,
        #     then channels from config.
        capture = contextlib.contextmanager(lambda: (yield))
        if debug:
            log_context = partial(utils.LoggingContext, logging.DEBUG)
        elif verbose:
            log_context = partial(utils.LoggingContext, logging.WARN)
        else:
            log_context = partial(utils.LoggingContext, logging.CRITICAL + 1)
            capture = utils.capture

        urls = list(channel_urls)
        if os.path.isdir(output_folder):
            urls.insert(0, url_path(output_folder))
        ensure_valid_channel(output_folder, subdir, verbose=verbose, locking=locking,
                             timeout=timeout)

        # silence output from conda about fetching index files
        with log_context():
            with capture():
                # replace noarch with native subdir - this ends up building an index with both the
                #      native content and the noarch content.
                if subdir == 'noarch':
                    subdir = conda_interface.subdir
                try:
                    cached_index = get_index(channel_urls=urls,
                                             prepend=not omit_defaults,
                                             use_local=False,
                                             use_cache=False,
                                             platform=subdir)
                # HACK: defaults does not have the many subfolders we support.  Omit it and
                #          try again.
                except CondaHTTPError:
                    if 'defaults' in urls:
                        urls.remove('defaults')
                    cached_index = get_index(channel_urls=urls,
                                             prepend=omit_defaults,
                                             use_local=False,
                                             use_cache=False,
                                             platform=subdir)
        local_index_timestamp = os.path.getmtime(index_file)
        local_subdir = subdir
        cached_channels = channel_urls
    return cached_index, local_index_timestamp
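
The cache-freshness test above boils down to comparing the repodata file's
mtime against the timestamp recorded at the last rebuild; distilled (and
simplified to the two most telling conditions):

import os

def index_is_stale(index_file, cached_ts, cached_subdir, subdir):
    """True when the cached in-memory index can no longer be trusted."""
    if not os.path.isfile(index_file):
        return True
    return (cached_subdir != subdir or
            os.path.getmtime(index_file) > cached_ts)
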
Example #42
def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        rendered_metadata = m.copy()
        rendered_metadata.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                            for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        parent_metadata = parent_metadata.copy()
        parent_metadata.config.variant = m.config.variant
        output = parent_metadata.get_rendered_output(m.name())

        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # this is hacky, but it gets the jinja2 things like pin_compatible from the rendered output
        # rerendered_output = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        # run_reqs = utils.expand_reqs(rerendered_output.meta.get('requirements', {}))
        # run_reqs = run_reqs.get('run', [])
        # if run_reqs:
        #     requirements['run'] = run_reqs
        # m.meta['requirements'] = requirements
        # m.meta['build'] = rerendered_output.meta.get('build', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                            for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(m, pinning_env,
                                        m.config.variant,
                                        exclude_pattern=exclude_pattern,
                                        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                            for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(
                m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #   best thing is to hard-code the absolute path.  This probably won't exist on any
        #   system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                source_path = os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn("Returning non-final recipe for {}; one or more dependencies "
                    "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    return m
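
The exclude_pattern built above anchors each name at the start of a spec and
requires whitespace or end-of-string after it, so excluding 'python' does not
also exclude 'python-dateutil'. A quick check (package names chosen for
illustration):

import re

excludes = {'python', 'numpy'}
exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                       for exc in excludes))
assert exclude_pattern.match('python 3.7')
assert exclude_pattern.match('numpy')
assert not exclude_pattern.match('python-dateutil')
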
Example #43
def create_env(prefix,
               specs_or_actions,
               env,
               config,
               subdir,
               clear_cache=True,
               retry=0,
               locks=None,
               is_cross=False,
               is_conda=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.WARN)

    if os.path.exists(prefix):
        for entry in glob(os.path.join(prefix, "*")):
            utils.rm_rf(entry)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            if not locks:
                locks = utils.get_conda_operation_locks(config)
            try:
                with utils.try_acquire_locks(locks, timeout=config.timeout):
                    # input is a list - it's specs in MatchSpec format
                    if not hasattr(specs_or_actions, 'keys'):
                        specs = list(set(specs_or_actions))
                        actions = get_install_actions(
                            prefix,
                            tuple(specs),
                            env,
                            subdir=subdir,
                            verbose=config.verbose,
                            debug=config.debug,
                            locking=config.locking,
                            bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                            timeout=config.timeout,
                            disable_pip=config.disable_pip,
                            max_env_retry=config.max_env_retry,
                            output_folder=config.output_folder,
                            channel_urls=tuple(config.channel_urls))
                    else:
                        actions = specs_or_actions
                    index, _, _ = get_build_index(
                        subdir=subdir,
                        bldpkgs_dir=config.bldpkgs_dir,
                        output_folder=config.output_folder,
                        channel_urls=config.channel_urls,
                        debug=config.debug,
                        verbose=config.verbose,
                        locking=config.locking,
                        timeout=config.timeout)
                    utils.trim_empty_keys(actions)
                    display_actions(actions, index)
                    if utils.on_win:
                        for k, v in os.environ.items():
                            os.environ[k] = str(v)
                    with env_var('CONDA_QUIET', not config.verbose,
                                 reset_context):
                        with env_var('CONDA_JSON', not config.verbose,
                                     reset_context):
                            execute_actions(actions, index)
            except (SystemExit, PaddingError, LinkError,
                    DependencyNeedsBuildingError, CondaError,
                    BuildLockError) as exc:
                if (("too short in" in str(exc) or re.search(
                        'post-link failed for: (?:[a-zA-Z]*::)?openssl',
                        str(exc)) or isinstance(exc, PaddingError))
                        and config.prefix_length > 80):
                    if config.prefix_length_fallback:
                        log.warn("Build prefix failed with prefix length %d",
                                 config.prefix_length)
                        log.warn("Error was: ")
                        log.warn(str(exc))
                        log.warn(
                            "One or more of your package dependencies needs to be rebuilt "
                            "with a longer prefix length.")
                        log.warn(
                            "Falling back to legacy prefix length of 80 characters."
                        )
                        log.warn(
                            "Your package will not install into prefixes > 80 characters."
                        )
                        config.prefix_length = 80

                        host = '_h_env' in prefix
                        # Set this here and use to create environ
                        #   Setting this here is important because we use it below (symlink)
                        prefix = config.host_prefix if host else config.build_prefix
                        actions['PREFIX'] = prefix

                        create_env(prefix,
                                   actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   is_cross=is_cross)
                    else:
                        raise
                elif 'lock' in str(exc):
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs_or_actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   retry=retry + 1,
                                   is_cross=is_cross)
                elif ('requires a minimum conda version' in str(exc)
                      or 'link a source that does not' in str(exc)):
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        pkg_dir = str(exc)
                        folder = 0
                        while os.path.dirname(
                                pkg_dir) not in pkgs_dirs and folder < 20:
                            pkg_dir = os.path.dirname(pkg_dir)
                            folder += 1
                        log.warn(
                            "I think conda ended up with a partial extraction for %s.  "
                            "Removing the folder and retrying", pkg_dir)
                        if os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs_or_actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error(
                            "Failed to create env, max retries exceeded.")
                        raise
                else:
                    raise
            # HACK: some of the time, conda screws up somehow and incomplete packages result.
            #    Just retry.
            except (AssertionError, IOError, ValueError, RuntimeError,
                    LockError) as exc:
                if isinstance(exc, AssertionError):
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                        log.warn(
                            "I think conda ended up with a partial extraction for %s.  "
                            "Removing the folder and retrying", pkg_dir)
                        if os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                if retry < config.max_env_retry:
                    log.warn(
                        "failed to create env, retrying.  exception was: %s",
                        str(exc))
                    create_env(prefix,
                               specs_or_actions,
                               config=config,
                               subdir=subdir,
                               env=env,
                               clear_cache=clear_cache,
                               retry=retry + 1,
                               is_cross=is_cross)
                else:
                    log.error("Failed to create env, max retries exceeded.")
                    raise
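
Stripped of the conda specifics, the recovery strategy above is bounded
recursive retry; a self-contained sketch (the RuntimeError trigger is
hypothetical):

def with_retries(action, retry=0, max_retry=3):
    """Re-invoke `action` on failure until max_retry is exhausted."""
    try:
        return action()
    except RuntimeError as exc:
        if retry < max_retry:
            print("failed, retrying.  exception was: %s" % exc)
            return with_retries(action, retry=retry + 1, max_retry=max_retry)
        raise
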
Example #44
def finalize_metadata(m,
                      parent_metadata=None,
                      permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    if not parent_metadata:
        parent_metadata = m
    # initialized here so the top-level check at the end also works when the
    # recipe is skipped
    is_top_level = True
    if m.skip():
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name,
                                            variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        output = parent_metadata.get_rendered_output(m.name(),
                                                     variant=m.config.variant)

        is_top_level = True
        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            else:
                is_top_level = False

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(
            m.name()))
        build_unsat, host_unsat = add_upstream_pins(
            m, permit_unsatisfiable_variants, exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(
            m,
            pinning_env,
            m.config.variant,
            exclude_pattern=exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {
            dep.split()[0]: " ".join(dep.split()[1:])
            for dep in full_build_deps
        }

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [
            get_pin_from_build(m, dep, full_build_dep_versions)
            for dep in run_deps
        ]
        versioned_run_deps = [
            utils.ensure_valid_spec(spec, warn=True)
            for spec in versioned_run_deps
        ]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({
                get_pin_from_build(m, dep, full_build_dep_versions)
                for dep in test_deps
            })
            versioned_test_deps = [
                utils.ensure_valid_spec(spec, warn=True)
                for spec in versioned_test_deps
            ]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #   best thing is to hard-code the absolute path.  This probably won't exist on any
        #   system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                source_path = os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn(
                "Returning non-final recipe for {}; one or more dependencies "
                "were unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    if is_top_level:
        parent_metadata = m
    return m
Example #45
def get_install_actions(prefix,
                        specs,
                        env,
                        retries=0,
                        subdir=None,
                        verbose=True,
                        debug=False,
                        locking=True,
                        bldpkgs_dirs=None,
                        timeout=900,
                        disable_pip=False,
                        max_env_retry=3,
                        output_folder=None,
                        channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    specs = list(specs)
    if specs:
        specs.extend(create_default_packages)
    if verbose or debug:
        capture = contextlib.contextmanager(lambda: (yield))
        if debug:
            conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts, _ = get_build_index(subdir,
                                         list(bldpkgs_dirs)[0],
                                         output_folder=output_folder,
                                         channel_urls=channel_urls,
                                         debug=debug,
                                         verbose=verbose,
                                         locking=locking,
                                         timeout=timeout)
    specs = tuple(
        utils.ensure_valid_spec(spec) for spec in specs
        if not str(spec).endswith('@'))

    if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions
            and last_index_ts >= index_ts):
        actions = cached_actions[(specs, env, subdir, channel_urls,
                                  disable_pip)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except (NoPackagesFoundError, UnsatisfiableError) as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError,
                        AssertionError, BuildLockError) as exc:
                    if 'lock' in str(exc):
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)
                          or isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(
                            locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s. "
                                "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                        actions = get_install_actions(
                            prefix,
                            tuple(specs),
                            env,
                            retries=retries + 1,
                            subdir=subdir,
                            verbose=verbose,
                            debug=debug,
                            locking=locking,
                            bldpkgs_dirs=tuple(bldpkgs_dirs),
                            timeout=timeout,
                            disable_pip=disable_pip,
                            max_env_retry=max_env_retry,
                            output_folder=output_folder,
                            channel_urls=tuple(channel_urls))
                    else:
                        log.error(
                            "Failed to get install actions, max retries exceeded."
                        )
                        raise
        if disable_pip:
            for pkg in ('pip', 'setuptools', 'wheel'):
                # specs are the raw specifications, not the conda-derived actual specs
                #   We're testing that pip etc. are manually specified
                if not any(
                        re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep))
                        for dep in specs):
                    actions['LINK'] = [
                        spec for spec in actions['LINK'] if spec.name != pkg
                    ]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls,
                        disable_pip)] = actions.copy()
        last_index_ts = index_ts
    return actions
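
The caching above keys the solved action plan on a tuple of the request
parameters, which is why specs, bldpkgs_dirs, and channel_urls are converted
to tuples first (lists are unhashable). In miniature, with a stand-in solver:

cached_plans = {}

def resolve(specs, env, subdir, channel_urls, disable_pip):
    key = (tuple(specs), env, subdir, tuple(channel_urls), disable_pip)
    if key not in cached_plans:
        cached_plans[key] = {'LINK': sorted(specs)}  # stand-in for a real solve
    return cached_plans[key].copy()
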
Example #46
def update_index(dir_path, config, force=False, check_md5=False, remove=True, lock=None,
                 could_be_mirror=True):
    """
    Update all index files in dir_path with changed packages.

    :param force: Whether to re-index all packages (including those that
                  haven't changed) or not.
    :type force: bool
    :param check_md5: Whether to check MD5s instead of mtimes for determining
                      if a package changed.
    :type check_md5: bool
    """

    log = utils.get_logger(__name__)

    log.debug("updating index in: %s", dir_path)
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)

    index_path = join(dir_path, '.index.json')

    if not lock:
        lock = get_lock(dir_path)

    locks = []
    if config.locking:
        locks.append(lock)

    index = {}

    with try_acquire_locks(locks, config.timeout):
        if not force:
            try:
                mode_dict = {'mode': 'r', 'encoding': 'utf-8'} if PY3 else {'mode': 'rb'}
                with open(index_path, **mode_dict) as fi:
                    index = json.load(fi)
            except (IOError, ValueError):
                index = {}

        subdir = None

        files = set(fn for fn in os.listdir(dir_path) if fn.endswith('.tar.bz2'))
        if could_be_mirror and any(fn.startswith('_license-') for fn in files):
            sys.exit("""\
    Error:
        Indexing a copy of the Anaconda conda package channel is neither
        necessary nor supported.  If you wish to add your own packages,
        you can do so by adding them to a separate channel.
    """)
        for fn in files:
            path = join(dir_path, fn)
            if fn in index:
                if check_md5:
                    if index[fn]['md5'] == md5_file(path):
                        continue
                elif index[fn]['mtime'] == getmtime(path):
                    continue
            if config.verbose:
                print('updating:', fn)
            d = read_index_tar(path, config, lock=lock)
            d.update(file_info(path))
            index[fn] = d
            # there's only one subdir for a given folder, so only read these contents once
            if not subdir:
                subdir = d['subdir']

        for fn in files:
            index[fn]['sig'] = '.' if isfile(join(dir_path, fn + '.sig')) else None

        if remove:
            # remove files from the index which are not on disk
            for fn in set(index) - files:
                if config.verbose:
                    print("removing:", fn)
                del index[fn]

        # Deal with Python 2 and 3's different json module type reqs
        mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
        with open(index_path, **mode_dict) as fo:
            json.dump(index, fo, indent=2, sort_keys=True, default=str)

        # --- new repodata
        for fn in index:
            info = index[fn]
            for varname in 'arch', 'platform', 'mtime', 'ucs':
                try:
                    del info[varname]
                except KeyError:
                    pass

            if 'requires' in info and 'depends' not in info:
                info['depends'] = info['requires']

        repodata = {'packages': index, 'info': {}}
        write_repodata(repodata, dir_path, lock=lock, config=config)
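
The repodata handed to write_repodata is just the per-filename index nested
under a 'packages' key; a minimal illustration of the shape (all field
values hypothetical):

import json

repodata = {
    'info': {},
    'packages': {
        'mypkg-1.0-py37_0.tar.bz2': {
            'name': 'mypkg', 'version': '1.0', 'build': 'py37_0',
            'build_number': 0, 'depends': ['python >=3.7,<3.8.0a0'],
        },
    },
}
print(json.dumps(repodata, indent=2, sort_keys=True))
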
Example #47
def get_install_actions(prefix, specs, env, retries=0, subdir=None,
                        verbose=True, debug=False, locking=True,
                        bldpkgs_dirs=None, timeout=90, disable_pip=False,
                        max_env_retry=3, output_folder=None, channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    if verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    elif debug:
        capture = contextlib.contextmanager(lambda: (yield))
        conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts = get_build_index(subdir, list(bldpkgs_dirs)[0], output_folder=output_folder,
                                      channel_urls=channel_urls, debug=debug, verbose=verbose,
                                      locking=locking, timeout=timeout)
    specs = tuple(_ensure_valid_spec(spec) for spec in specs)

    if (specs, env, subdir, channel_urls) in cached_actions and last_index_ts >= index_ts:
        actions = cached_actions[(specs, env, subdir, channel_urls)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except NoPackagesFoundError as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError, AssertionError) as exc:
                    if 'lock' in str(exc):
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                str(exc))
                    elif ('requires a minimum conda version' in str(exc) or
                            'link a source that does not' in str(exc) or
                            isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                        "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                str(exc))
                        actions = get_install_actions(prefix, tuple(specs), env,
                                                      retries=retries + 1,
                                                      subdir=subdir,
                                                      verbose=verbose,
                                                      debug=debug,
                                                      locking=locking,
                                                      bldpkgs_dirs=tuple(bldpkgs_dirs),
                                                      timeout=timeout,
                                                      disable_pip=disable_pip,
                                                      max_env_retry=max_env_retry,
                                                      output_folder=output_folder,
                                                      channel_urls=tuple(channel_urls))
                    else:
                        log.error("Failed to get install actions, max retries exceeded.")
                        raise
        if disable_pip:
            actions['LINK'] = [spec for spec in actions['LINK']
                                if not spec.startswith('pip-') and
                                not spec.startswith('setuptools-')]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls)] = actions.copy()
        last_index_ts = index_ts
    return actions
Example #48
def msvc_env_cmd(bits, config, override=None):
    log = get_logger(__name__)
    log.warn("Using legacy MSVC compiler setup.  This will be removed in conda-build 4.0.  "
             "Use {{compiler('c')}} jinja2 in requirements/build or explicitly list compiler "
             "package as build dependency instead.")
    arch_selector = 'x86' if bits == 32 else 'amd64'

    msvc_env_lines = []

    version = None
    if override is not None:
        version = override

    # The DISTUTILS_USE_SDK variable tells distutils to not try and validate
    # the MSVC compiler. For < 3.5 this still forcibly looks for 'cl.exe'.
    # For > 3.5 it literally just skips the validation logic.
    # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py
    # for more information.
    msvc_env_lines.append('set DISTUTILS_USE_SDK=1')
    # This is also required to hit the 'don't validate' logic on < 3.5.
    # For > 3.5 this is ignored.
    msvc_env_lines.append('set MSSdk=1')

    if not version:
        py_ver = config.variant.get('python', get_default_variants()[0]['python'])
        if int(py_ver[0]) >= 3:
            if int(py_ver.split('.')[1]) < 5:
                version = '10.0'
            else:
                version = '14.0'
        else:
            version = '9.0'

    if float(version) >= 14.0:
        # For Python 3.5+, ensure that we link with the dynamic runtime.  See
        # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info
        msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{0}.dll'.format(
            version.replace('.', '')))

    vcvarsall_vs_path = build_vcvarsall_vs_path(version)

    def build_vcvarsall_cmd(cmd, arch=arch_selector):
        # Default argument `arch_selector` is defined above
        return 'call "{cmd}" {arch}'.format(cmd=cmd, arch=arch)

    msvc_env_lines.append('set "VS_VERSION={}"'.format(version))
    msvc_env_lines.append('set "VS_MAJOR={}"'.format(version.split('.')[0]))
    msvc_env_lines.append('set "VS_YEAR={}"'.format(VS_VERSION_STRING[version][-4:]))
    msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(VS_VERSION_STRING[version] +
                                                            {64: ' Win64', 32: ''}[bits]))
    # tell msys2 to ignore path conversions for issue-causing windows-style flags in build
    #   See https://github.com/conda-forge/icu-feedstock/pull/5
    msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out"')
    msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"')
    if version == '10.0':
        try:
            WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'),
                                            'installationfolder')
            WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd')

            win_sdk_arch = '/Release /x86' if bits == 32 else '/Release /x64'
            win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch)

            # There are two methods of building Python 3.3 and 3.4 extensions (both
            # of which required Visual Studio 2010 - as explained in the Python wiki
            # https://wiki.python.org/moin/WindowsCompilers)
            # 1) Use the Windows SDK 7.1
            # 2) Use Visual Studio 2010 (any edition)
            # However, VS2010 never shipped with a 64-bit compiler, so in this case
            # **only** option (1) applies. For this reason, we always try and
            # activate the Windows SDK first. Unfortunately, unsuccessfully setting
            # up the environment does **not EXIT 1** and therefore we must fall
            # back to attempting to set up VS2010.
            # DelayedExpansion is required for the SetEnv.cmd
            msvc_env_lines.append('Setlocal EnableDelayedExpansion')
            msvc_env_lines.append(win_sdk_cmd)
            # If the WindowsSDKDir environment variable has not been successfully
            # set then try activating VS2010
            msvc_env_lines.append('if not "%WindowsSDKDir%" == "{}" ( {} )'.format(
                WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path)))
        # sdk is not installed.  Fall back to only trying VS 2010
        except KeyError:
            msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
    elif version == '9.0':
        # Get the Visual Studio 2008 path (not the Visual C++ for Python path)
        # and get the 'vcvars64.bat' from inside the bin (in the directory above
        # that returned by distutils_find_vcvarsall)
        try:
            VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)),
                                                'bin', 'vcvars64.bat')
        # there's an exception if VS or the VC compiler for python are not actually installed.
        except (KeyError, TypeError):
            VCVARS64_VS9_BAT_PATH = None

        error1 = 'if errorlevel 1 {}'

        # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
        # The Visual Studio 2008 Express edition does not properly contain
        # the amd64 build files, so we call the vcvars64.bat manually,
        # rather than using the vcvarsall.bat which would try and call the
        # missing bat file.
        if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH:
            msvc_env_lines.append(error1.format(
                build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)))
        # Otherwise, fall back to Microsoft Visual C++ Compiler for Python 2.7+
        # by using the logic provided by setuptools
        msvc_env_lines.append(error1.format(
            build_vcvarsall_cmd(distutils_find_vcvarsall(9))))
    else:
        # Visual Studio 14 or otherwise
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))

    return '\n'.join(msvc_env_lines) + '\n'
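
The default-version logic at the top of this function can be isolated and
checked on its own; a sketch of the Python-version to Visual Studio mapping
it implements (version strings as used above):

def default_msvc_version(py_ver):
    """Map a Python version string to the matching MSVC version."""
    major, minor = (int(x) for x in py_ver.split('.')[:2])
    if major >= 3:
        return '10.0' if minor < 5 else '14.0'
    return '9.0'

assert default_msvc_version('2.7') == '9.0'
assert default_msvc_version('3.4') == '10.0'
assert default_msvc_version('3.7') == '14.0'
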
Example #49
def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0,
               locks=None, is_cross=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks, timeout=config.timeout):
                        # input is a list - it's specs in MatchSpec format
                        if not hasattr(specs_or_actions, 'keys'):
                            specs = list(set(specs_or_actions))
                            actions = get_install_actions(prefix, tuple(specs), env,
                                                          subdir=subdir,
                                                          verbose=config.verbose,
                                                          debug=config.debug,
                                                          locking=config.locking,
                                                          bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                                                          timeout=config.timeout,
                                                          disable_pip=config.disable_pip,
                                                          max_env_retry=config.max_env_retry,
                                                          output_folder=config.output_folder,
                                                          channel_urls=tuple(config.channel_urls))
                        else:
                            actions = specs_or_actions
                        index, index_ts = get_build_index(subdir=subdir,
                                                        bldpkgs_dir=config.bldpkgs_dir,
                                                        output_folder=config.output_folder,
                                                        channel_urls=config.channel_urls,
                                                        debug=config.debug,
                                                        verbose=config.verbose,
                                                        locking=config.locking,
                                                        timeout=config.timeout)
                        utils.trim_empty_keys(actions)
                        display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        execute_actions(actions, index, verbose=config.debug)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError) as exc:
                    if (("too short in" in str(exc) or
                            re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl', str(exc)) or
                            isinstance(exc, PaddingError)) and
                            config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn("Build prefix failed with prefix length %d",
                                     config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn("One or more of your package dependencies needs to be rebuilt "
                                    "with a longer prefix length.")
                            log.warn("Falling back to legacy prefix length of 80 characters.")
                            log.warn("Your package will not install into prefixes > 80 characters.")
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix
                            actions['PREFIX'] = prefix

                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, is_cross=is_cross)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                    clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross)
                    elif ('requires a minimum conda version' in str(exc) or
                          'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s.  "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                            create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                       clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross)
                        else:
                            log.error("Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages result.
                #    Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError, LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks, timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                            log.warn("I think conda ended up with a partial extraction for %s.  "
                                     "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn("failed to create env, retrying.  exception was: %s", str(exc))
                        create_env(prefix, actions, config=config, subdir=subdir, env=env,
                                   clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross)
                    else:
                        log.error("Failed to create env, max retries exceeded.")
                        raise
    # We must not symlink conda across different platforms when cross-compiling.
    #  On second thought, I think we must, because activating the host env does
    #     the symlink for us anyway, and when activate does it, we end up with
    #     conda symlinks in every package. =()
    # if os.path.basename(prefix) == '_build_env' or not is_cross:
    if utils.on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
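For orientation, a minimal usage sketch (not from the source): the prefix, spec list, and Config instance below are invented stand-ins, and the call assumes the conda-build internals that create_env relies on are importable.

# Hypothetical invocation: resolve two specs into a fresh host prefix,
# letting create_env handle index lookup, locking, and retries.
from conda_build.config import Config  # standard conda-build Config object

config = Config()
create_env('/tmp/conda-bld/_h_env', ['python 3.6.*', 'pip'], env='host',
           config=config, subdir='linux-64')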
Example #50
def inspect_linkages(packages, prefix=sys.prefix, untracked=False,
                     all_packages=False, show_files=False, groupby="package"):
    pkgmap = {}

    installed = _installed(prefix)

    if not packages and not untracked and not all_packages:
        raise ValueError("At least one package or --untracked or --all must be provided")

    if all_packages:
        packages = sorted(installed.keys())

    if untracked:
        packages.append(untracked_package)

    for pkg in packages:
        if pkg == untracked_package:
            dist = untracked_package
        elif pkg not in installed:
            sys.exit("Package %s is not installed in %s" % (pkg, prefix))
        else:
            dist = installed[pkg]

        if not sys.platform.startswith(('linux', 'darwin')):
            sys.exit("Error: conda inspect linkages is only implemented in Linux and OS X")

        if dist == untracked_package:
            obj_files = get_untracked_obj_files(prefix)
        else:
            obj_files = get_package_obj_files(dist, prefix)
        linkages = get_linkages(obj_files, prefix)
        depmap = defaultdict(list)
        pkgmap[pkg] = depmap
        depmap['not found'] = []
        depmap['system'] = []
        for binary in linkages:
            for lib, path in linkages[binary]:
                if path not in {'', 'not found'}:
                    path = replace_path(binary, path, prefix)
                if path.startswith(prefix):
                    deps = list(which_package(path))
                    if len(deps) > 1:
                        deps_str = [str(dep) for dep in deps]
                        get_logger(__name__).warn("Warning: %s comes from multiple "
                                                  "packages: %s", path, comma_join(deps_str))
                    rel_path = path.split(prefix + '/', 1)[-1]
                    if not deps:
                        if exists(path):
                            depmap['untracked'].append((lib, rel_path, binary))
                        else:
                            depmap['not found'].append((lib, rel_path, binary))
                    for d in deps:
                        depmap[d].append((lib, rel_path, binary))
                elif path == 'not found':
                    depmap['not found'].append((lib, path, binary))
                else:
                    depmap['system'].append((lib, path, binary))

    output_string = ""
    if groupby == 'package':
        for pkg in packages:
            output_string += _underlined_text(pkg)
            output_string += print_linkages(pkgmap[pkg], show_files=show_files)

    elif groupby == 'dependency':
        # {pkg: {dep: [files]}} -> {dep: {pkg: [files]}}
        inverted_map = defaultdict(lambda: defaultdict(list))
        for pkg in pkgmap:
            for dep in pkgmap[pkg]:
                if pkgmap[pkg][dep]:
                    inverted_map[dep][pkg] = pkgmap[pkg][dep]

        # print system and not found last
        k = sorted(set(inverted_map.keys()) - {'system', 'not found'})
        for dep in k + ['system', 'not found']:
            output_string += _underlined_text(dep)
            output_string += print_linkages(inverted_map[dep], show_files=show_files)

    else:
        raise ValueError("Unrecognized groupby: %s" % groupby)
    if hasattr(output_string, 'decode'):
        output_string = output_string.decode('utf-8')
    return output_string
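A short usage sketch, under the assumption that the function is called from an environment where the named package is installed; the package name is arbitrary.

import sys

# Hedged example: report shared-library linkages for one package, grouped
# by dependency rather than by package.  Only meaningful on Linux/macOS.
report = inspect_linkages(['numpy'], prefix=sys.prefix, groupby='dependency')
print(report)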
Example #51
def verify_git_repo(git_exe,
                    git_dir,
                    git_url,
                    git_commits_since_tag,
                    debug=False,
                    expected_rev='HEAD'):
    env = os.environ.copy()
    log = utils.get_logger(__name__)

    if debug:
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stderr = FNULL

    if not expected_rev:
        return False

    OK = True

    env['GIT_DIR'] = git_dir
    try:
        # Verify current commit (minus our locally applied patches) matches expected commit
        current_commit = utils.check_output_env([
            git_exe, "log", "-n1", "--format=%H",
            "HEAD" + "^" * git_commits_since_tag
        ],
                                                env=env,
                                                stderr=stderr)
        current_commit = current_commit.decode('utf-8')
        expected_tag_commit = utils.check_output_env(
            [git_exe, "log", "-n1", "--format=%H", expected_rev],
            env=env,
            stderr=stderr)
        expected_tag_commit = expected_tag_commit.decode('utf-8')

        if current_commit != expected_tag_commit:
            return False

        # Verify correct remote url. Need to find the git cache directory,
        # and check the remote from there.
        cache_details = utils.check_output_env([git_exe, "remote", "-v"],
                                               env=env,
                                               stderr=stderr)
        cache_details = cache_details.decode('utf-8')
        cache_dir = cache_details.split('\n')[0].split()[1]

        if not isinstance(cache_dir, str):
            # On Windows, subprocess env can't handle unicode.
            cache_dir = cache_dir.encode(sys.getfilesystemencoding()
                                         or 'utf-8')

        try:
            remote_details = utils.check_output_env(
                [git_exe, "--git-dir", cache_dir, "remote", "-v"],
                env=env,
                stderr=stderr)
        except subprocess.CalledProcessError:
            if sys.platform == 'win32' and cache_dir.startswith('/'):
                cache_dir = utils.convert_unix_path_to_win(cache_dir)
            remote_details = utils.check_output_env(
                [git_exe, "--git-dir", cache_dir, "remote", "-v"],
                env=env,
                stderr=stderr)
        remote_details = remote_details.decode('utf-8')
        remote_url = remote_details.split('\n')[0].split()[1]

        # on windows, remote URL comes back to us as cygwin or msys format.  Python doesn't
        # know how to normalize it.  Need to convert it to a windows path.
        if sys.platform == 'win32' and remote_url.startswith('/'):
            remote_url = utils.convert_unix_path_to_win(git_url)

        if os.path.exists(remote_url):
            # Local filepaths are allowed, but make sure we normalize them
            remote_url = normpath(remote_url)

        # If the current source directory in conda-bld/work doesn't match the user's
        # metadata git_url or git_rev, then we aren't looking at the right source.
        if not os.path.isdir(
                remote_url) and remote_url.lower() != git_url.lower():
            log.debug("remote does not match git_url")
            log.debug("Remote: " + remote_url.lower())
            log.debug("git_url: " + git_url.lower())
            OK = False
    except subprocess.CalledProcessError as error:
        log.debug(
            "Error obtaining git information in verify_git_repo.  Error was: ")
        log.debug(str(error))
        OK = False
    finally:
        if not debug:
            FNULL.close()
    return OK
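A sketch of a plausible call site (every argument value is invented for illustration): verify that the checkout in the work directory sits exactly two commits past the tagged revision the recipe pins, and bail out if it does not.

ok = verify_git_repo('git', '/tmp/conda-bld/work/.git',
                     'https://github.com/example/project.git',
                     git_commits_since_tag=2, expected_rev='v1.2.3')
if not ok:
    raise RuntimeError('work directory does not match recipe git_url/git_rev')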
Example #52
def _combine_spec_dictionaries(specs, extend_keys=None, filter_keys=None, zip_keys=None,
                               log_output=True):
    # each spec is a dictionary.  Each subsequent spec replaces the previous one.
    #     Only the last one with the key stays.
    values = {}
    keys = ensure_list(filter_keys)
    extend_keys = ensure_list(extend_keys)

    for spec_source, spec in specs.items():
        if spec:
            if log_output:
                log = get_logger(__name__)
                log.info("Adding in variants from {}".format(spec_source))
            for k, v in spec.items():
                if not keys or k in keys:
                    if k in extend_keys:
                        # update dictionaries, extend lists
                        if hasattr(v, 'keys'):
                            if k in values and hasattr(values[k], 'keys'):
                                values[k].update(v)
                            else:
                                values[k] = v.copy()
                        else:
                            values[k] = ensure_list(values.get(k, []))
                            values[k].extend(ensure_list(v))
                            # uniquify
                            values[k] = list(set(values[k]))
                    elif k == 'zip_keys':
                        v = [subval for subval in v if subval]
                        # should always be a list of lists, but users may specify as just a list
                        if not isinstance(v[0], (list, tuple)):
                            v = [v]
                        values[k] = values.get(k, [])
                        values[k].extend(v)
                        # uniquify the groups, keeping each one as a list
                        values[k] = [list(group)
                                     for group in set(tuple(group) for group in values[k])]
                    else:
                        if hasattr(v, 'keys'):
                            values[k] = v.copy()
                        else:
                            # default "group" is just this one key.  We latch onto other groups if
                            #     they exist
                            keys_in_group = [k]
                            if zip_keys:
                                for group in zip_keys:
                                    if k in group:
                                        keys_in_group = group
                                        break
                            # in order to clobber, one must replace ALL of the zipped keys.
                            #    Otherwise, we filter later.
                            if all(group_item in spec for group_item in keys_in_group):
                                for group_item in keys_in_group:
                                    if len(ensure_list(spec[group_item])) != len(ensure_list(v)):
                                        raise ValueError("All entries associated by a zip_key "
                                    "field must be the same length.  In {}, {} and {} are "
                                    "different ({} and {})".format(spec_source, k, group_item,
                                                                len(ensure_list(v)),
                                                                len(ensure_list(spec[group_item]))))
                                    values[group_item] = ensure_list(spec[group_item])
                            else:
                                if k in values and any(subvalue not in values[k]
                                                       for subvalue in ensure_list(v)):
                                    raise ValueError(
                                        "variant config in {} is ambiguous because it does not "
                                        "fully implement all zipped keys, or specifies a "
                                        "subspace that is not fully implemented.".format(
                                            spec_source))

    return values
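To make the zip-group clobbering rule concrete, here is a small worked example (spec names and values are invented). Because python and numpy are zipped, the second spec is only allowed to replace them by supplying both keys, with paired lists of equal length:

from collections import OrderedDict

specs = OrderedDict([
    ('base', {'python': ['2.7', '3.6'], 'numpy': ['1.11', '1.13']}),
    # replaces the whole zip group at once, so no ValueError is raised
    ('override', {'python': ['3.7'], 'numpy': ['1.16']}),
])
combined = _combine_spec_dictionaries(specs, zip_keys=[['python', 'numpy']],
                                      log_output=False)
assert combined == {'python': ['3.7'], 'numpy': ['1.16']}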
Example #53
def verify_git_repo(git_dir, git_url, git_commits_since_tag, debug=False, expected_rev='HEAD'):
    env = os.environ.copy()
    log = utils.get_logger(__name__)

    if debug:
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stderr = FNULL

    if not expected_rev:
        return False

    OK = True

    env['GIT_DIR'] = git_dir
    try:
        # Verify current commit (minus our locally applied patches) matches expected commit
        current_commit = utils.check_output_env(["git",
                                                 "log",
                                                 "-n1",
                                                 "--format=%H",
                                                 "HEAD" + "^" * git_commits_since_tag],
                                                env=env, stderr=stderr)
        current_commit = current_commit.decode('utf-8')
        expected_tag_commit = utils.check_output_env(["git", "log", "-n1", "--format=%H",
                                                      expected_rev],
                                                     env=env, stderr=stderr)
        expected_tag_commit = expected_tag_commit.decode('utf-8')

        if current_commit != expected_tag_commit:
            return False

        # Verify correct remote url. Need to find the git cache directory,
        # and check the remote from there.
        cache_details = utils.check_output_env(["git", "remote", "-v"], env=env,
                                               stderr=stderr)
        cache_details = cache_details.decode('utf-8')
        cache_dir = cache_details.split('\n')[0].split()[1]

        if not isinstance(cache_dir, str):
            # On Windows, subprocess env can't handle unicode.
            cache_dir = cache_dir.encode(sys.getfilesystemencoding() or 'utf-8')

        try:
            remote_details = utils.check_output_env(["git", "--git-dir", cache_dir,
                                                     "remote", "-v"],
                                                     env=env, stderr=stderr)
        except subprocess.CalledProcessError:
            if sys.platform == 'win32' and cache_dir.startswith('/'):
                cache_dir = utils.convert_unix_path_to_win(cache_dir)
            remote_details = utils.check_output_env(["git", "--git-dir", cache_dir,
                                                      "remote", "-v"],
                                                     env=env, stderr=stderr)
        remote_details = remote_details.decode('utf-8')
        remote_url = remote_details.split('\n')[0].split()[1]

        # on windows, remote URL comes back to us as cygwin or msys format.  Python doesn't
        # know how to normalize it.  Need to convert it to a windows path.
        if sys.platform == 'win32' and remote_url.startswith('/'):
            remote_url = utils.convert_unix_path_to_win(git_url)

        if os.path.exists(remote_url):
            # Local filepaths are allowed, but make sure we normalize them
            remote_url = normpath(remote_url)

        # If the current source directory in conda-bld/work doesn't match the user's
        # metadata git_url or git_rev, then we aren't looking at the right source.
        if not os.path.isdir(remote_url) and remote_url.lower() != git_url.lower():
            log.debug("remote does not match git_url")
            log.debug("Remote: " + remote_url.lower())
            log.debug("git_url: " + git_url.lower())
            OK = False
    except subprocess.CalledProcessError as error:
        log.debug("Error obtaining git information in verify_git_repo.  Error was: ")
        log.debug(str(error))
        OK = False
    finally:
        if not debug:
            FNULL.close()
    return OK
Example #54
def msvc_env_cmd(bits, config, override=None):
    log = get_logger(__name__)
    log.warn("Using legacy MSVC compiler setup.  This will be removed in conda-build 4.0. "
             "If this recipe does not use a compiler, this message is safe to ignore.  "
             "Otherwise, use {{compiler('<language>')}} jinja2 in requirements/build.")
    if override:
        log.warn("msvc_compiler key in meta.yaml is deprecated. Use the new"
        "variant-powered compiler configuration instead. Note that msvc_compiler"
        "is incompatible with the new \{\{compiler('c')\}\} jinja scheme.")
    # this has been an int at times.  Make sure it's a string for consistency.
    bits = str(bits)
    arch_selector = 'x86' if bits == '32' else 'amd64'

    msvc_env_lines = []

    version = None
    if override is not None:
        version = override

    # The DISTUTILS_USE_SDK variable tells distutils to not try and validate
    # the MSVC compiler. For < 3.5 this still forcibly looks for 'cl.exe'.
    # For > 3.5 it literally just skips the validation logic.
    # See distutils _msvccompiler.py and msvc9compiler.py / msvccompiler.py
    # for more information.
    msvc_env_lines.append('set DISTUTILS_USE_SDK=1')
    # This is also required to hit the 'don't validate' logic on < 3.5.
    # For > 3.5 this is ignored.
    msvc_env_lines.append('set MSSdk=1')

    if not version:
        py_ver = config.variant.get('python', get_default_variant(config)['python'])
        if int(py_ver[0]) >= 3:
            if int(py_ver.split('.')[1]) < 5:
                version = '10.0'
            else:
                version = '14.0'
        else:
            version = '9.0'

    if float(version) >= 14.0:
        # For Python 3.5+, ensure that we link with the dynamic runtime.  See
        # http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info
        msvc_env_lines.append('set PY_VCRUNTIME_REDIST=%LIBRARY_BIN%\\vcruntime{0}.dll'.format(
            version.replace('.', '')))

    vcvarsall_vs_path = build_vcvarsall_vs_path(version)

    def build_vcvarsall_cmd(cmd, arch=arch_selector):
        # Default argument `arch_selector` is defined above
        return 'call "{cmd}" {arch}'.format(cmd=cmd, arch=arch)

    msvc_env_lines.append('set "VS_VERSION={}"'.format(version))
    msvc_env_lines.append('set "VS_MAJOR={}"'.format(version.split('.')[0]))
    msvc_env_lines.append('set "VS_YEAR={}"'.format(VS_VERSION_STRING[version][-4:]))
    msvc_env_lines.append('set "CMAKE_GENERATOR={}"'.format(VS_VERSION_STRING[version] +
                                                            {'64': ' Win64', '32': ''}[bits]))
    # tell msys2 to ignore path conversions for issue-causing windows-style flags in build
    #   See https://github.com/conda-forge/icu-feedstock/pull/5
    msvc_env_lines.append('set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out"')
    msvc_env_lines.append('set "MSYS2_ENV_CONV_EXCL=CL"')
    if version == '10.0':
        try:
            WIN_SDK_71_PATH = Reg.get_value(os.path.join(WINSDK_BASE, 'v7.1'),
                                            'installationfolder')
            WIN_SDK_71_BAT_PATH = os.path.join(WIN_SDK_71_PATH, 'Bin', 'SetEnv.cmd')

            win_sdk_arch = '/Release /x86' if bits == '32' else '/Release /x64'
            win_sdk_cmd = build_vcvarsall_cmd(WIN_SDK_71_BAT_PATH, arch=win_sdk_arch)

            # There are two methods of building Python 3.3 and 3.4 extensions (both
            # of which required Visual Studio 2010 - as explained in the Python wiki
            # https://wiki.python.org/moin/WindowsCompilers)
            # 1) Use the Windows SDK 7.1
            # 2) Use Visual Studio 2010 (any edition)
            # However, VS2010 never shipped with a 64-bit compiler, so in this case
            # **only** option (1) applies. For this reason, we always try and
            # activate the Windows SDK first. Unfortunately, unsuccessfully setting
            # up the environment does **not EXIT 1** and therefore we must fall
            # back to attempting to set up VS2010.
            # DelayedExpansion is required for the SetEnv.cmd
            msvc_env_lines.append('Setlocal EnableDelayedExpansion')
            msvc_env_lines.append(win_sdk_cmd)
            # If the WindowsSDKDir environment variable has not been successfully
            # set then try activating VS2010
            msvc_env_lines.append('if not "%WindowsSDKDir%" == "{}" ( {} )'.format(
                WIN_SDK_71_PATH, build_vcvarsall_cmd(vcvarsall_vs_path)))
        # sdk is not installed.  Fall back to only trying VS 2010
        except KeyError:
            msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
    elif version == '9.0':
        # Get the Visual Studio 2008 path (not the Visual C++ for Python path)
        # and get the 'vcvars64.bat' from inside the bin (in the directory above
        # that returned by distutils_find_vcvarsall)
        try:
            VCVARS64_VS9_BAT_PATH = os.path.join(os.path.dirname(distutils_find_vcvarsall(9)),
                                                'bin', 'vcvars64.bat')
        # there's an exception if VS or the VC compiler for python are not actually installed.
        except (KeyError, TypeError):
            VCVARS64_VS9_BAT_PATH = None

        error1 = 'IF %ERRORLEVEL% NEQ 0 {}'

        # Prefer VS9 proper over Microsoft Visual C++ Compiler for Python 2.7
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))
        # The Visual Studio 2008 Express edition does not properly contain
        # the amd64 build files, so we call the vcvars64.bat manually,
        # rather than using the vcvarsall.bat which would try and call the
        # missing bat file.
        if arch_selector == 'amd64' and VCVARS64_VS9_BAT_PATH:
            msvc_env_lines.append(error1.format(
                build_vcvarsall_cmd(VCVARS64_VS9_BAT_PATH)))
        # Otherwise, fall back to Microsoft Visual C++ Compiler for Python 2.7+
        # by using the logic provided by setuptools
        msvc_env_lines.append(error1.format(
            build_vcvarsall_cmd(distutils_find_vcvarsall(9))))
    else:
        # Visual Studio 14 or otherwise
        msvc_env_lines.append(build_vcvarsall_cmd(vcvarsall_vs_path))

    return '\n'.join(msvc_env_lines) + '\n'
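The return value is a block of plain cmd.exe text, so the natural consumer prepends it to the build batch script. A hedged sketch follows; the file name and build command are placeholders, and config is assumed to be a conda-build Config carrying a python variant.

env_block = msvc_env_cmd('64', config)
with open('bld.bat', 'w') as f:
    f.write(env_block)                 # vcvars activation lines first
    f.write('nmake /f Makefile.vc\n')  # then the recipe's own commands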
Example #55
def update_index(dir_path, force=False, check_md5=False, remove=True, lock=None,
                 could_be_mirror=True, verbose=True, locking=True, timeout=90):
    """
    Update all index files in dir_path with changed packages.

    :param verbose: Should detailed status messages be output?
    :type verbose: bool
    :param force: Whether to re-index all packages (including those that
                  haven't changed) or not.
    :type force: bool
    :param check_md5: Whether to check MD5s instead of mtimes for determining
                      if a package changed.
    :type check_md5: bool
    """

    log = utils.get_logger(__name__)

    log.debug("updating index in: %s", dir_path)
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)

    index_path = join(dir_path, '.index.json')

    if not lock:
        lock = get_lock(dir_path)

    locks = []
    if locking:
        locks.append(lock)

    index = {}

    with try_acquire_locks(locks, timeout):
        if not force:
            try:
                mode_dict = {'mode': 'r', 'encoding': 'utf-8'} if PY3 else {'mode': 'rb'}
                with open(index_path, **mode_dict) as fi:
                    index = json.load(fi)
            except (IOError, ValueError):
                index = {}

        files = set(fn for fn in os.listdir(dir_path) if fn.endswith('.tar.bz2'))
        for fn in files:
            path = join(dir_path, fn)
            if fn in index:
                if check_md5:
                    if index[fn]['md5'] == md5_file(path):
                        continue
                elif index[fn]['mtime'] == getmtime(path):
                    continue
            if verbose:
                print('updating:', fn)
            d = read_index_tar(path, lock=lock, locking=locking, timeout=timeout)
            d.update(file_info(path))
            index[fn] = d

        for fn in files:
            index[fn]['sig'] = '.' if isfile(join(dir_path, fn + '.sig')) else None

        if remove:
            # remove files from the index which are not on disk
            for fn in set(index) - files:
                if verbose:
                    print("removing:", fn)
                del index[fn]

        # Deal with Python 2 and 3's different json module type reqs
        mode_dict = {'mode': 'w', 'encoding': 'utf-8'} if PY3 else {'mode': 'wb'}
        with open(index_path, **mode_dict) as fo:
            json.dump(index, fo, indent=2, sort_keys=True, default=str)

        # --- new repodata
        for fn in index:
            info = index[fn]
            for varname in 'arch', 'platform', 'mtime', 'ucs':
                try:
                    del info[varname]
                except KeyError:
                    pass

            if 'requires' in info and 'depends' not in info:
                info['depends'] = info['requires']

        repodata = {'packages': index, 'info': {}}
        write_repodata(repodata, dir_path, lock=lock, locking=locking, timeout=timeout)
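A minimal usage sketch, assuming a flat local channel subdirectory full of .tar.bz2 packages (the path is invented):

# Rebuild .index.json and repodata for a local channel subdir, trusting
# mtimes rather than MD5s and pruning entries for deleted packages.
update_index('/srv/local-channel/linux-64', check_md5=False, remove=True,
             verbose=True)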