コード例 #1
0
ファイル: utils.py プロジェクト: timsnyder/conda-build
def tar_xf(tarball, dir_path, mode='r:*'):
    """Extract *tarball* into *dir_path*.

    ``.tar.Z`` archives are first decompressed with the external
    ``uncompress`` (or ``gunzip``) tool, and on Python 2 ``.tar.xz``
    archives are decompressed with ``unxz``, since the stdlib tarfile
    there cannot read those compressions.

    :param tarball: path to the archive
    :param dir_path: directory to extract into
    :param mode: tarfile open mode (default autodetects compression)
    """
    if tarball.lower().endswith('.tar.z'):
        uncompress = external.find_executable('uncompress')
        if not uncompress:
            uncompress = external.find_executable('gunzip')
        if not uncompress:
            sys.exit("""\
uncompress (or gunzip) is required to unarchive .z source files.
""")
        check_call_env([uncompress, '-f', tarball])
        tarball = tarball[:-2]  # drop the 2-char '.Z'/'.z' suffix
    if not PY3 and tarball.endswith('.tar.xz'):
        unxz = external.find_executable('unxz')
        if not unxz:
            sys.exit("""\
unxz is required to unarchive .xz source files.
""")

        check_call_env([unxz, '-f', '-k', tarball])
        tarball = tarball[:-3]  # drop the '.xz' suffix
    # Context manager guarantees the archive is closed even when
    # extraction raises; the original leaked the handle on error.
    with tarfile.open(tarball, mode) as t:
        if not PY3:
            t.extractall(path=dir_path.encode(codec))
        else:
            t.extractall(path=dir_path)
コード例 #2
0
ファイル: environ.py プロジェクト: jjhelmus/conda-build
def meta_vars(meta, config):
    """Build the dict of environment variables exported to build scripts.

    Pulls whitelisted variables named in ``build/script_env`` from the
    current environment, adds git/hg checkout metadata when the work dir
    is a VCS checkout, then adds the PKG_* / RECIPE_DIR values.

    :param meta: recipe metadata object
    :param config: build configuration (work_dir, build_prefix, debug, ...)
    :return: dict mapping variable names to string values
    """
    d = {}
    for var_name in ensure_list(meta.get_value('build/script_env', [])):
        value = os.getenv(var_name)
        if value is None:
            # Unset pass-through variables only warn; the build continues.
            warnings.warn(
                "The environment variable '%s' is undefined." % var_name,
                UserWarning
            )
        else:
            d[var_name] = value

    git_dir = join(config.work_dir, '.git')
    hg_dir = join(config.work_dir, '.hg')

    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8')

    if external.find_executable('git', config.build_prefix) and os.path.exists(git_dir):
        git_url = meta.get_value('source/git_url')

        if os.path.exists(git_url):
            # git_url points at a local path rather than a real URL.
            if sys.platform == 'win32':
                git_url = utils.convert_unix_path_to_win(git_url)
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))

        _x = False

        if git_url:
            _x = verify_git_repo(git_dir,
                                 git_url,
                                 config.git_commits_since_tag,
                                 config.debug,
                                 meta.get_value('source/git_rev', 'HEAD'))

        # Record git info only for a verified repo or a local source path.
        if _x or meta.get_value('source/path'):
            d.update(get_git_info(git_dir, config.debug))

    elif external.find_executable('hg', config.build_prefix) and os.path.exists(hg_dir):
        d.update(get_hg_build_info(hg_dir))

    # use `get_value` to prevent early exit while name is still unresolved during rendering
    d['PKG_NAME'] = meta.get_value('package/name')
    d['PKG_VERSION'] = meta.version()
    d['PKG_BUILDNUM'] = str(meta.build_number() or 0)
    if meta.final:
        # The build string is only stable once the metadata is finalized.
        d['PKG_BUILD_STRING'] = str(meta.build_id())
    d['RECIPE_DIR'] = (meta.path if meta.path else
                       meta.meta.get('extra', {}).get('parent_recipe', {}).get('path', ''))
    return d
コード例 #3
0
ファイル: environ.py プロジェクト: bioconda/conda-build
def meta_vars(meta, config):
    """Build the dict of environment variables exported to build scripts.

    Collects pass-through variables listed under ``build/script_env``,
    adds git/hg checkout information when the source dir is a VCS
    checkout, then adds the PKG_* / RECIPE_DIR values.

    :param meta: recipe metadata object
    :param config: build configuration (build_prefix, ...)
    :return: dict mapping variable names to string values
    """
    d = {}
    for var_name in meta.get_value('build/script_env', []):
        value = os.getenv(var_name)
        if value is None:
            # Unset pass-through variables only warn; the build continues.
            warnings.warn(
                "The environment variable '%s' is undefined." % var_name,
                UserWarning
            )
        else:
            d[var_name] = value

    git_dir = join(source.get_dir(config), '.git')
    hg_dir = join(source.get_dir(config), '.hg')

    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8')

    if external.find_executable('git', config.build_prefix) and os.path.exists(git_dir):
        git_url = meta.get_value('source/git_url')

        if os.path.exists(git_url):
            # git_url points at a local path rather than a real URL.
            if sys.platform == 'win32':
                git_url = utils.convert_unix_path_to_win(git_url)
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))

        _x = False

        if git_url:
            _x = verify_git_repo(git_dir,
                                 git_url,
                                 config,
                                 meta.get_value('source/git_rev', 'HEAD'))

        # Record git info only for a verified repo or a local source path.
        if _x or meta.get_value('source/path'):
            d.update(get_git_info(git_dir, config))

    elif external.find_executable('hg', config.build_prefix) and os.path.exists(hg_dir):
        d.update(get_hg_build_info(hg_dir))

    d['PKG_NAME'] = meta.name()
    d['PKG_VERSION'] = meta.version()
    d['PKG_BUILDNUM'] = str(meta.build_number())
    d['PKG_BUILD_STRING'] = str(meta.build_id())
    d['RECIPE_DIR'] = meta.path
    return d
コード例 #4
0
ファイル: source.py プロジェクト: ESSS/conda-build
def apply_patch(src_dir, path, config, git=None):
    """Apply the patch file *path* inside *src_dir*.

    Git-format patches are applied with ``git am`` when a git executable
    is supplied; otherwise the external ``patch`` tool is used.

    :param src_dir: directory the patch is applied in
    :param path: path to the patch file
    :param config: build configuration (provides build_prefix)
    :param git: path to a git executable, or None to force ``patch``
    """
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    files, is_git_format = _get_patch_file_details(path)
    if git and is_git_format:
        # Prevents git from asking interactive questions,
        # also necessary to achieve sha1 reproducibility;
        # as is --committer-date-is-author-date. By this,
        # we mean a round-trip of git am/git format-patch
        # gives the same file.
        # BUG FIX: the original assigned os.environ directly, which
        # aliases (not copies) it and permanently leaks GIT_COMMITTER_*
        # into the whole process environment.
        git_env = os.environ.copy()
        git_env['GIT_COMMITTER_NAME'] = 'conda-build'
        git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
        check_call_env([git, 'am', '--committer-date-is-author-date', path],
                       cwd=src_dir, stdout=None, env=git_env)
    else:
        print('Applying patch: %r' % path)
        patch = external.find_executable('patch', config.build_prefix)
        if patch is None:
            sys.exit("""\
        Error:
            Cannot use 'git' (not a git repo and/or patch) and did not find 'patch' in: %s
            You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
            or conda, m2-patch (Windows),
        """ % (os.pathsep.join(external.dir_paths)))
        patch_strip_level = _guess_patch_strip_level(files, src_dir)
        patch_args = ['-p%d' % patch_strip_level, '-i', path]
        if sys.platform == 'win32':
            # `patch` on Windows needs Unix line endings in the patch file.
            patch_args[-1] = _ensure_unix_line_endings(path)
        check_call_env([patch] + patch_args, cwd=src_dir)
        if sys.platform == 'win32' and os.path.exists(patch_args[-1]):
            os.remove(patch_args[-1])  # clean up .patch_unix file
コード例 #5
0
ファイル: source.py プロジェクト: ESSS/conda-build
def git_source(meta, recipe_dir, config):
    '''Fetch the recipe's git source (recursing into submodules) into the
    work directory, going through a local mirror cache.'''
    # The shared mirror cache has to exist before we clone into it.
    if not isdir(config.git_cache):
        os.makedirs(config.git_cache)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")

    git_url = meta['git_url']
    git_depth = int(meta.get('git_depth', -1))
    git_ref = meta.get('git_rev', 'HEAD')

    if git_url.startswith('.'):
        # Recipe-relative path: resolve it against the recipe directory.
        git_url = abspath(normpath(os.path.join(recipe_dir, git_url)))
        cache_name = git_url.replace(':', '_') if sys.platform == 'win32' else git_url[1:]
    else:
        # Derive a filesystem-safe cache dir name from the URL.
        cache_name = git_url.split('://')[-1].replace('/', os.sep)
        if cache_name.startswith(os.sep):
            cache_name = cache_name[1:]
        cache_name = cache_name.replace(':', '_')

    mirror_dir = join(config.git_cache, cache_name)
    git_mirror_checkout_recursive(
        git, mirror_dir, config.work_dir, git_url, config, git_ref, git_depth, True)
    return git
コード例 #6
0
ファイル: source.py プロジェクト: mingwandroid/conda-build
def git_source(source_dict, git_cache, src_dir, recipe_path=None, verbose=True):
    ''' Download a source from a Git repo (or submodule, recursively) '''
    # The shared mirror cache must exist before cloning into it.
    if not isdir(git_cache):
        os.makedirs(git_cache)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed in your root environment.")

    git_url = source_dict['git_url']
    # -1 presumably means "no shallow clone" — confirm against
    # git_mirror_checkout_recursive.
    git_depth = int(source_dict.get('git_depth', -1))
    git_ref = source_dict.get('git_rev') or 'HEAD'

    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        git_url = abspath(normpath(os.path.join(recipe_path, git_url)))
        if sys.platform == 'win32':
            # Colons (drive letters) are not valid in directory names.
            git_dn = git_url.replace(':', '_')
        else:
            git_dn = git_url[1:]
    else:
        # Derive a filesystem-safe cache dir name from the URL.
        git_dn = git_url.split('://')[-1].replace('/', os.sep)
        if git_dn.startswith(os.sep):
            git_dn = git_dn[1:]
        git_dn = git_dn.replace(':', '_')
    mirror_dir = join(git_cache, git_dn)
    git_mirror_checkout_recursive(
        git, mirror_dir, src_dir, git_url, git_cache=git_cache, git_ref=git_ref,
        git_depth=git_depth, is_top_level=True, verbose=verbose)
    return git
コード例 #7
0
ファイル: source.py プロジェクト: bioconda/conda-build
def git_source(meta, recipe_dir, config):
    """ Download a source from a Git repo (or submodule, recursively) """
    if not isdir(config.git_cache):
        os.makedirs(config.git_cache)

    git = external.find_executable("git")
    if not git:
        sys.exit("Error: git is not installed")

    git_url = meta["git_url"]
    git_depth = int(meta.get("git_depth", -1))
    git_ref = meta.get("git_rev", "HEAD")

    if git_url.startswith("."):
        # It's a relative path from the conda recipe
        git_url = abspath(normpath(os.path.join(recipe_dir, git_url)))
        if sys.platform == "win32":
            git_dn = git_url.replace(":", "_")
        else:
            git_dn = git_url[1:]
    else:
        git_dn = git_url.split("://")[-1].replace("/", os.sep)
        if git_dn.startswith(os.sep):
            git_dn = git_dn[1:]
        git_dn = git_dn.replace(":", "_")
    mirror_dir = join(config.git_cache, git_dn)
    git_mirror_checkout_recursive(git, mirror_dir, config.work_dir, git_url, config, git_ref, git_depth, True)
    return git
コード例 #8
0
def meta_vars(meta, config):
    """Collect the environment variables exported to build scripts:
    pass-through vars from build/script_env, VCS metadata for git/hg
    checkouts, and the standard PKG_* / RECIPE_DIR values."""
    env = {}
    for name in ensure_list(meta.get_value("build/script_env", [])):
        val = os.getenv(name)
        if val is None:
            # Missing pass-through vars warn instead of failing the build.
            warnings.warn("The environment variable '%s' is undefined." % name, UserWarning)
        else:
            env[name] = val

    git_dir = join(config.work_dir, ".git")
    hg_dir = join(config.work_dir, ".hg")

    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or "utf-8")

    if external.find_executable("git", config.build_prefix) and os.path.exists(git_dir):
        git_url = meta.get_value("source/git_url")

        if os.path.exists(git_url):
            # Local path, not a URL: normalize it relative to the recipe.
            if sys.platform == "win32":
                git_url = utils.convert_unix_path_to_win(git_url)
            git_url = normpath(join(meta.path, git_url))

        verified = bool(git_url) and verify_git_repo(
            git_dir, git_url, config, meta.get_value("source/git_rev", "HEAD"))

        if verified or meta.get_value("source/path"):
            env.update(get_git_info(git_dir, config))
    elif external.find_executable("hg", config.build_prefix) and os.path.exists(hg_dir):
        env.update(get_hg_build_info(hg_dir))

    # use `get_value` to prevent early exit while name is still unresolved during rendering
    env["PKG_NAME"] = meta.get_value("package/name")
    env["PKG_VERSION"] = meta.version()
    env["PKG_BUILDNUM"] = str(meta.build_number())
    env["PKG_BUILD_STRING"] = str(meta.build_id())
    env["RECIPE_DIR"] = meta.path
    return env
コード例 #9
0
ファイル: post.py プロジェクト: bioconda/conda-build
def mk_relative_linux(f, prefix, build_prefix=None, rpaths=('lib',)):
    """Rewrite the rpath of *f* (a path relative to *prefix*) via patchelf.

    Relative entries in *rpaths* become $ORIGIN-relative; absolute ones
    are written as-is.

    :param f: file path relative to *prefix*
    :param prefix: install prefix containing *f*
    :param build_prefix: where to look for patchelf (defaults to *prefix*)
    :param rpaths: iterable of rpath entries to encode
    """
    path = join(prefix, f)
    if build_prefix is None:
        assert path.startswith(prefix + '/')
    else:
        prefix = build_prefix
    rpath = ':'.join('$ORIGIN/' + utils.relative(f, d) if not
        d.startswith('/') else d for d in rpaths)
    patchelf = external.find_executable('patchelf', prefix)
    if patchelf is None:
        # BUG FIX: the original passed None straight to `call`, crashing
        # with a TypeError instead of a usable error message.
        sys.exit("Error: did not find 'patchelf' in %s; cannot set rpath of %s"
                 % (prefix, path))
    print('patchelf: file: %s\n    setting rpath to: %s' % (path, rpath))
    call([patchelf, '--force-rpath', '--set-rpath', rpath, path])
コード例 #10
0
ファイル: utils.py プロジェクト: patanijo/conda-build
def convert_win_path_to_unix(path):
    """Translate a Windows path to its Unix (cygwin-style) form, using
    the `cygpath` tool when available."""
    if not external.find_executable('cygpath'):
        # No cygpath on PATH: use the pure-python conversion instead.
        return win_path_to_unix(path)
    cmd = "cygpath -u {0}".format(path)
    if PY3:
        return subprocess.getoutput(cmd)
    # Python 2: getoutput lives in `commands`, so shell out directly and
    # strip the trailing newline / backslash.
    return subprocess.check_output(cmd.split()).rstrip().rstrip("\\")
コード例 #11
0
ファイル: build.py プロジェクト: msarahan/conda-build
def check_external():
    """On Linux, abort the build unless the `patchelf` tool can be found."""
    if not sys.platform.startswith('linux'):
        # patchelf is only needed for ELF relocation on Linux.
        return
    if external.find_executable('patchelf') is None:
        sys.exit("""\
Error:
    Did not find 'patchelf' in: %s
    'patchelf' is necessary for building conda packages on Linux with
    relocatable ELF libraries.  You can install patchelf using conda install
    patchelf.
""" % (os.pathsep.join(external.dir_paths)))
コード例 #12
0
ファイル: build.py プロジェクト: msarahan/conda-build
def handle_anaconda_upload(path, config):
    """Optionally upload the built package at *path* to anaconda.org.

    The decision comes from config.anaconda_upload (which defaults from
    condarc) plus config.token / config.user; when uploading is disabled
    or the `anaconda` client is missing, manual-upload instructions are
    printed instead.

    :param path: path of the package file to upload
    :param config: build configuration (anaconda_upload, token, user)
    """
    from conda_build.os_utils.external import find_executable

    upload = False
    # this is the default, for no explicit argument.
    # remember that anaconda_upload takes defaults from condarc
    if config.anaconda_upload is None:
        pass
    # rc file has uploading explicitly turned off
    elif config.anaconda_upload is False:
        print("# Automatic uploading is disabled")
    else:
        upload = True

    # A token or explicit user implies the intent to upload.
    if config.token or config.user:
        upload = True

    no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
#
# To have conda build upload to anaconda.org automatically, use
# $ conda config --set anaconda_upload yes
""" % path
    if not upload:
        print(no_upload_message)
        return

    anaconda = find_executable('anaconda')
    if anaconda is None:
        print(no_upload_message)
        sys.exit('''
Error: cannot locate anaconda command (required for upload)
# Try:
# $ conda install anaconda-client
''')
    print("Uploading to anaconda.org")
    cmd = [anaconda, ]

    if config.token:
        cmd.extend(['--token', config.token])
    cmd.append('upload')
    if config.user:
        cmd.extend(['--user', config.user])
    cmd.append(path)
    try:
        subprocess.call(cmd)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not intercepted; still print the manual instructions
        # before re-raising.
        print(no_upload_message)
        raise
コード例 #13
0
def execute(recipe_dirs,
            prefix=sys.prefix,
            no_pth_file=False,
            build_ext=False,
            clean=False,
            uninstall=False):
    """Develop-install (or clean / uninstall) source checkouts into *prefix*.

    :param recipe_dirs: a source path or a list of source paths
    :param prefix: target conda environment prefix
    :param no_pth_file: skip writing the conda.pth entry
    :param build_ext: run `setup.py build_ext` before linking
    :param clean: run `setup.py clean` first
    :param uninstall: remove the conda.pth entry and return
    """

    if not isdir(prefix):
        sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create the environment first.
#""" % prefix)

    assert find_executable('python', prefix=prefix)

    # current environment's site-packages directory
    sp_dir = get_site_packages(
        prefix, '.'.join(
            (str(sys.version_info.major), str(sys.version_info.minor))))

    # BUG FIX: `type(recipe_dirs) == string_types` compared a type with a
    # tuple of types and was never true, so a bare string argument was
    # iterated character by character.  isinstance is the correct check.
    if isinstance(recipe_dirs, string_types):
        recipe_dirs = [recipe_dirs]

    for path in recipe_dirs:
        pkg_path = abspath(expanduser(path))

        if uninstall:
            # uninstall then exit - does not do any other operations
            _uninstall(sp_dir, pkg_path)
            return

        if clean or build_ext:
            setup_py = get_setup_py(pkg_path)
            if clean:
                _clean(setup_py)
                if not build_ext:
                    return

            # build extensions before adding to conda.pth
            if build_ext:
                _build_ext(setup_py)

        if not no_pth_file:
            write_to_conda_pth(sp_dir, pkg_path)

        # go through the source looking for compiled extensions and make sure
        # they use the conda environment for loading libraries at runtime
        relink_sharedobjects(pkg_path, prefix)
        print("completed operation for: " + pkg_path)
コード例 #14
0
def tar_xf(tarball, dir_path, mode='r:*'):
    """Extract *tarball* into *dir_path*, pre-decompressing ``.tar.Z``
    (and, on Python 2, ``.tar.xz``) archives with external tools first.

    :param tarball: path to the archive
    :param dir_path: directory to extract into
    :param mode: tarfile open mode (default autodetects compression)
    """
    if tarball.lower().endswith('.tar.z'):
        uncompress = external.find_executable('uncompress')
        if not uncompress:
            uncompress = external.find_executable('gunzip')
        if not uncompress:
            sys.exit("""\
uncompress (or gunzip) is required to unarchive .z source files.
""")
        check_call_env([uncompress, '-f', tarball])
        tarball = tarball[:-2]  # drop the 2-char '.Z'/'.z' suffix
    if not PY3 and tarball.endswith('.tar.xz'):
        unxz = external.find_executable('unxz')
        if not unxz:
            sys.exit("""\
unxz is required to unarchive .xz source files.
""")

        check_call_env([unxz, '-f', '-k', tarball])
        tarball = tarball[:-3]  # drop the '.xz' suffix
    # `with` closes the archive even when extraction raises; the
    # original leaked the file handle on error.
    with tarfile.open(tarball, mode) as t:
        t.extractall(path=dir_path)
コード例 #15
0
def test_find_executable(testing_workdir, monkeypatch):
    """find_executable must return the first *executable* file named
    'target_name' on PATH, ignoring files that are off PATH, files that
    are not executable, and executables with other names."""
    if sys.platform != "win32":
        import stat

        path_components = []

        def create_file(unix_path, put_on_path, executable):
            # Write a dummy shell script at the given location.  The
            # enclosing platform guard means this only ever runs on
            # POSIX, so the unreachable `sys.platform == "win32"`
            # branches from the original were removed.
            localized_path = os.path.join(testing_workdir, *unix_path.split('/'))

            dirname = os.path.split(localized_path)[0]
            if not os.path.isdir(dirname):
                os.makedirs(dirname)

            prefix = "#!/bin/bash\nexec 1>&2\n"
            with open(localized_path, 'w') as f:
                f.write(prefix + """
            echo ******* You have reached the dummy {}. It is likely there is a bug in
            echo ******* conda that makes it not add the _build/bin directory onto the
            echo ******* PATH before running the source checkout tool
            exit -1
            """.format(localized_path))

            if put_on_path:
                path_components.append(dirname)

            if executable:
                st = os.stat(localized_path)
                os.chmod(localized_path, st.st_mode | stat.S_IEXEC)

            return localized_path

        create_file('executable/not/on/path/with/target_name', put_on_path=False, executable=True)
        create_file('non_executable/on/path/with/target_name', put_on_path=True, executable=False)
        create_file('executable/on/path/with/non_target_name', put_on_path=True, executable=True)
        target_path = create_file('executable/on/path/with/target_name', put_on_path=True, executable=True)
        create_file('another/executable/later/on/path/with/target_name', put_on_path=True, executable=True)

        monkeypatch.setenv('PATH', os.pathsep.join(path_components))

        find = find_executable('target_name')

        assert find == target_path, "Expected to find 'target_name' in '%s', but found it in '%s'" % (target_path, find)
コード例 #16
0
def test_find_executable(testing_workdir, monkeypatch):
    """find_executable should return the first executable file named
    'target_name' on PATH, skipping off-PATH files, non-executable files
    and differently-named executables."""
    if sys.platform != "win32":
        import stat

        path_components = []

        def create_file(unix_path, put_on_path, executable):
            # Lay down a dummy script and optionally register its dir on PATH
            # and/or mark it executable.
            localized_path = os.path.join(testing_workdir, *unix_path.split('/'))
            # empty prefix by default - extra bit at beginning of file
            # NOTE(review): this win32 branch is unreachable — the whole test
            # body is guarded by `sys.platform != "win32"` above.
            if sys.platform == "win32":
                localized_path = localized_path + ".bat"

            dirname = os.path.split(localized_path)[0]
            if not os.path.isdir(dirname):
                os.makedirs(dirname)

            # NOTE(review): the win32 arm here is likewise unreachable.
            if sys.platform == "win32":
                prefix = "@echo off\n"
            else:
                prefix = "#!/bin/bash\nexec 1>&2\n"
            with open(localized_path, 'w') as f:
                f.write(prefix + """
            echo ******* You have reached the dummy {}. It is likely there is a bug in
            echo ******* conda that makes it not add the _build/bin directory onto the
            echo ******* PATH before running the source checkout tool
            exit -1
            """.format(localized_path))

            if put_on_path:
                path_components.append(dirname)

            if executable:
                st = os.stat(localized_path)
                os.chmod(localized_path, st.st_mode | stat.S_IEXEC)

            return localized_path

        create_file('executable/not/on/path/with/target_name', put_on_path=False, executable=True)
        create_file('non_executable/on/path/with/target_name', put_on_path=True, executable=False)
        create_file('executable/on/path/with/non_target_name', put_on_path=True, executable=True)
        target_path = create_file('executable/on/path/with/target_name', put_on_path=True, executable=True)
        create_file('another/executable/later/on/path/with/target_name', put_on_path=True, executable=True)

        monkeypatch.setenv('PATH', os.pathsep.join(path_components))

        find = find_executable('target_name')

        assert find == target_path, "Expected to find 'target_name' in '%s', but found it in '%s'" % (target_path, find)
コード例 #17
0
def hg_source(metadata, config):
    ''' Download a source from Mercurial repo. '''
    # Silence subprocess output unless verbose mode was requested.
    if config.verbose:
        stdout = None
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = FNULL
        stderr = FNULL

    meta = metadata.get_section('source')

    hg = external.find_executable('hg', config.build_prefix)
    if not hg:
        sys.exit('Error: hg not installed')
    hg_url = meta['hg_url']
    if not isdir(config.hg_cache):
        os.makedirs(config.hg_cache)
    # Cache clones under a filesystem-safe name derived from the URL.
    hg_dn = hg_url.split(':')[-1].replace('/', '_')
    cache_repo = join(config.hg_cache, hg_dn)
    if isdir(cache_repo):
        # Cache hit: just refresh the cached clone.
        check_call_env([hg, 'pull'],
                       cwd=cache_repo,
                       stdout=stdout,
                       stderr=stderr)
    else:
        check_call_env([hg, 'clone', hg_url, cache_repo],
                       stdout=stdout,
                       stderr=stderr)
        assert isdir(cache_repo)

    # now clone in to work directory
    update = meta.get('hg_tag') or 'tip'
    if config.verbose:
        print('checkout: %r' % update)

    check_call_env([hg, 'clone', cache_repo, config.work_dir],
                   stdout=stdout,
                   stderr=stderr)
    # -C discards local changes so the checkout matches the tag exactly.
    check_call_env([hg, 'update', '-C', update],
                   cwd=config.work_dir,
                   stdout=stdout,
                   stderr=stderr)

    if not config.verbose:
        FNULL.close()

    return config.work_dir
コード例 #18
0
def git_info(src_dir, verbose=True, fo=None):
    """Print (or write to *fo*) log / describe / status info for a Git repo.

    :param src_dir: path to the checkout (must be a directory)
    :param verbose: echo each command's output
    :param fo: optional file-like object to write to instead of stdout
    """
    assert isdir(src_dir)

    git = external.find_executable('git')
    if not git:
        log = get_logger(__name__)
        log.warn(
            "git not installed in root environment.  Skipping recording of git info."
        )
        return

    if verbose:
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stderr = FNULL

    # Ensure to explicitly set GIT_DIR as some Linux machines will not
    # properly execute without it.
    env = os.environ.copy()
    env['GIT_DIR'] = join(src_dir, '.git')
    env = {str(key): str(value) for key, value in env.items()}
    for cmd, check_error in [('git log -n1', True),
                             ('git describe --tags --dirty', False),
                             ('git status', True)]:
        # BUG FIX: default stdout so a failing non-fatal command
        # (check_error=False) neither raises NameError on first
        # iteration nor reuses the previous command's output.
        stdout = b''
        try:
            stdout = check_output_env(cmd.split(),
                                      stderr=stderr,
                                      cwd=src_dir,
                                      env=env)
        except CalledProcessError as e:
            if check_error:
                raise Exception("git error: %s" % str(e))
        encoding = locale.getpreferredencoding()
        if not fo:
            encoding = sys.stdout.encoding
        encoding = encoding or 'utf-8'
        if hasattr(stdout, 'decode'):
            stdout = stdout.decode(encoding, 'ignore')
        if fo:
            fo.write(u'==> %s <==\n' % cmd)
            if verbose:
                fo.write(stdout + u'\n')
        else:
            if verbose:
                print(u'==> %s <==\n' % cmd)
                safe_print_unicode(stdout + u'\n')
コード例 #19
0
def svn_source(meta, config):
    ''' Download a source from SVN repo. '''
    # Silence subprocess output unless verbose mode was requested.
    if config.verbose:
        stdout = None
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = FNULL
        stderr = FNULL

    def parse_bool(s):
        # Interpret common truthy spellings from recipe metadata.
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn = external.find_executable('svn', config.build_prefix)
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta['svn_url']
    svn_revision = meta.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no')
    if not isdir(config.svn_cache):
        os.makedirs(config.svn_cache)
    # Cache checkouts under a filesystem-safe name derived from the URL.
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(config.svn_cache, svn_dn)
    if svn_ignore_externals:
        extra_args = ['--ignore-externals']
    else:
        extra_args = []
    if isdir(cache_repo):
        # Cache hit: update the cached checkout to the wanted revision.
        check_call_env([svn, 'up', '-r', svn_revision] + extra_args,
                       cwd=cache_repo,
                       stdout=stdout,
                       stderr=stderr)
    else:
        check_call_env([svn, 'co', '-r', svn_revision] + extra_args +
                       [svn_url, cache_repo],
                       stdout=stdout,
                       stderr=stderr)
        assert isdir(cache_repo)

    # now copy into work directory
    copy_into(cache_repo, config.work_dir, config.timeout, symlinks=True)

    if not config.verbose:
        FNULL.close()

    return config.work_dir
コード例 #20
0
def mk_relative_linux(f, prefix, rpaths=('lib', )):
    'Respects the original values and converts abs to $ORIGIN-relative'

    elf = os.path.join(prefix, f)
    origin = os.path.dirname(elf)

    patchelf = external.find_executable('patchelf', prefix)
    try:
        # Read the current rpath; only the first line of output is used.
        existing = check_output([patchelf, '--print-rpath',
                                 elf]).decode('utf-8').splitlines()[0]
    except:
        # Best effort: files whose rpath can't be read are left untouched.
        print('patchelf: --print-rpath failed for %s\n' % (elf))
        return
    existing = existing.split(os.pathsep)
    new = []
    for old in existing:
        if old.startswith('$ORIGIN'):
            # Already relative; keep as-is.
            new.append(old)
        elif old.startswith('/'):
            # Test if this absolute path is outside of prefix. That is fatal.
            relpath = os.path.relpath(old, prefix)
            if relpath.startswith('..' + os.sep):
                print('Warning: rpath {0} is outside prefix {1} (removing it)'.
                      format(old, prefix))
            else:
                # In-prefix absolute entry: rewrite it $ORIGIN-relative,
                # dropping duplicates.
                relpath = '$ORIGIN/' + os.path.relpath(old, origin)
                if relpath not in new:
                    new.append(relpath)
    # Ensure that the asked-for paths are also in new.
    for rpath in rpaths:
        if not rpath.startswith('/'):
            # IMHO utils.relative shouldn't exist, but I am too paranoid to remove
            # it, so instead, make sure that what I think it should be replaced by
            # gives the same result and assert if not. Yeah, I am a chicken.
            rel_ours = os.path.normpath(utils.relative(f, rpath))
            rel_stdlib = os.path.normpath(
                os.path.relpath(rpath, os.path.dirname(f)))
            assert rel_ours == rel_stdlib, \
                'utils.relative {0} and relpath {1} disagree for {2}, {3}'.format(
                rel_ours, rel_stdlib, f, rpath)
            rpath = '$ORIGIN/' + rel_stdlib
        if rpath not in new:
            new.append(rpath)
    rpath = ':'.join(new)
    print('patchelf: file: %s\n    setting rpath to: %s' % (elf, rpath))
    call([patchelf, '--force-rpath', '--set-rpath', rpath, elf])
コード例 #21
0
ファイル: develop.py プロジェクト: cav71/conda-build
def execute(recipe_dirs, prefix=sys.prefix, no_pth_file=False,
            build_ext=False, clean=False, uninstall=False):
    """Develop-install (or clean / uninstall) source checkouts into *prefix*.

    :param recipe_dirs: a source path or a list of source paths
    :param prefix: target conda environment prefix
    :param no_pth_file: skip writing the conda.pth entry
    :param build_ext: run `setup.py build_ext` before linking
    :param clean: run `setup.py clean` first
    :param uninstall: remove the conda.pth entry and return
    """

    if not isdir(prefix):
        sys.exit("""\
Error: environment does not exist: %s
#
# Use 'conda create' to create the environment first.
#""" % prefix)

    assert find_executable('python', prefix=prefix)

    # current environment's site-packages directory
    sp_dir = get_site_packages(prefix, '.'.join((str(sys.version_info.major),
                                                 str(sys.version_info.minor))))

    # BUG FIX: `type(recipe_dirs) == string_types` compared a type with a
    # tuple of types and was never true, so a bare string argument was
    # iterated character by character.  isinstance is the correct check.
    if isinstance(recipe_dirs, string_types):
        recipe_dirs = [recipe_dirs]

    for path in recipe_dirs:
        pkg_path = abspath(expanduser(path))

        if uninstall:
            # uninstall then exit - does not do any other operations
            _uninstall(sp_dir, pkg_path)
            return

        if clean or build_ext:
            setup_py = get_setup_py(pkg_path)
            if clean:
                _clean(setup_py)
                if not build_ext:
                    return

            # build extensions before adding to conda.pth
            if build_ext:
                _build_ext(setup_py)

        if not no_pth_file:
            write_to_conda_pth(sp_dir, pkg_path)

        # go through the source looking for compiled extensions and make sure
        # they use the conda environment for loading libraries at runtime
        relink_sharedobjects(pkg_path, prefix)
        print("completed operation for: " + pkg_path)
コード例 #22
0
ファイル: source.py プロジェクト: mingwandroid/conda-build
def git_info(src_dir, verbose=True, fo=None):
    """Print (or write to *fo*) log / describe / status info for a Git repo.

    :param src_dir: path to the checkout (must be a directory)
    :param verbose: echo each command's output
    :param fo: optional file-like object to write to instead of stdout
    """
    assert isdir(src_dir)

    git = external.find_executable('git')
    if not git:
        log = get_logger(__name__)
        log.warn("git not installed in root environment.  Skipping recording of git info.")
        return

    if verbose:
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stderr = FNULL

    # Ensure to explicitly set GIT_DIR as some Linux machines will not
    # properly execute without it.
    env = os.environ.copy()
    env['GIT_DIR'] = join(src_dir, '.git')
    env = {str(key): str(value) for key, value in env.items()}
    for cmd, check_error in [
            ('git log -n1', True),
            ('git describe --tags --dirty', False),
            ('git status', True)]:
        # BUG FIX: default stdout so a failing non-fatal command
        # (check_error=False) neither raises NameError on first
        # iteration nor reuses the previous command's output.
        stdout = b''
        try:
            stdout = check_output_env(cmd.split(), stderr=stderr, cwd=src_dir, env=env)
        except CalledProcessError as e:
            if check_error:
                raise Exception("git error: %s" % str(e))
        encoding = locale.getpreferredencoding()
        if not fo:
            encoding = sys.stdout.encoding
        encoding = encoding or 'utf-8'
        if hasattr(stdout, 'decode'):
            stdout = stdout.decode(encoding, 'ignore')
        if fo:
            fo.write(u'==> %s <==\n' % cmd)
            if verbose:
                fo.write(stdout + u'\n')
        else:
            if verbose:
                print(u'==> %s <==\n' % cmd)
                safe_print_unicode(stdout + u'\n')
コード例 #23
0
ファイル: source.py プロジェクト: patricksnape/conda-build
def svn_source(metadata, config):
    '''Check out the SVN source described in *metadata* into config.work_dir.

    A pristine checkout is kept under config.svn_cache and updated on each
    call; the cache is then copied into the work directory.

    :return: the work directory path
    :raises SystemExit: if svn is not installed
    '''
    if config.verbose:
        stdout = None
        stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = FNULL
        stderr = FNULL

    meta = metadata.get_section('source')

    def parse_bool(s):
        # Accept the common truthy spellings used in meta.yaml.
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn = external.find_executable('svn', config.build_prefix)
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta['svn_url']
    svn_revision = meta.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no')
    if not isdir(config.svn_cache):
        os.makedirs(config.svn_cache)
    # Derive a filesystem-safe cache directory name from the URL.
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(config.svn_cache, svn_dn)
    if svn_ignore_externals:
        extra_args = ['--ignore-externals']
    else:
        extra_args = []
    try:
        if isdir(cache_repo):
            check_call_env([svn, 'up', '-r', svn_revision] + extra_args, cwd=cache_repo,
                           stdout=stdout, stderr=stderr)
        else:
            check_call_env([svn, 'co', '-r', svn_revision] + extra_args + [svn_url, cache_repo],
                           stdout=stdout, stderr=stderr)
            assert isdir(cache_repo)

        # now copy into work directory
        copy_into(cache_repo, config.work_dir, config.timeout, symlinks=True,
                  locking=config.locking)
    finally:
        # Close the devnull handle even when the svn command fails;
        # previously it leaked on any exception above.
        if not config.verbose:
            FNULL.close()

    return config.work_dir
コード例 #24
0
def git_source(source_dict,
               git_cache,
               src_dir,
               recipe_path=None,
               verbose=True):
    '''Download a source from a Git repo (or submodule, recursively).

    A mirror of the repository is maintained under *git_cache*; the
    requested ref is then checked out (recursively, for submodules) into
    *src_dir*.  Returns the path of the git executable that was used.
    '''
    if not isdir(git_cache):
        os.makedirs(git_cache)

    git = external.find_executable('git')
    if not git:
        sys.exit(
            "Error: git is not installed in your root environment or as a build requirement."
        )

    git_depth = int(source_dict.get('git_depth', -1))
    git_ref = source_dict.get('git_rev') or 'HEAD'

    git_url = source_dict['git_url']
    if git_url.startswith('~'):
        git_url = os.path.expanduser(git_url)
    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        git_url = abspath(normpath(os.path.join(recipe_path, git_url)))
        # Drive letters contain ':', which is not usable in a directory name.
        git_dn = git_url.replace(':', '_') if sys.platform == 'win32' else git_url[1:]
    else:
        # Derive the mirror directory name from the URL.
        git_dn = git_url.split('://')[-1].replace('/', os.sep)
        if git_dn.startswith(os.sep):
            git_dn = git_dn[1:]
        git_dn = git_dn.replace(':', '_')
    mirror_path = join(git_cache, git_dn)
    git_mirror_checkout_recursive(
        git, mirror_path, src_dir, git_url,
        git_cache=git_cache, git_ref=git_ref, git_depth=git_depth,
        is_top_level=True, verbose=verbose)
    return git
コード例 #25
0
ファイル: post.py プロジェクト: groutr/conda-build
def mk_relative_linux(f, prefix, rpaths=('lib',)):
    '''Respects the original values and converts abs to $ORIGIN-relative.

    Rewrites the rpath of the ELF file *f* (a path relative to *prefix*)
    so that absolute entries inside *prefix* become $ORIGIN-relative,
    entries pointing outside *prefix* are dropped with a warning, and the
    requested *rpaths* (relative to *prefix*) are appended.
    '''
    elf = os.path.join(prefix, f)
    origin = os.path.dirname(elf)

    patchelf = external.find_executable('patchelf', prefix)
    if not patchelf:
        # Without patchelf we can neither read nor rewrite the rpath.
        # (Previously this fell through into check_output with None and
        # was masked by a bare except with a misleading message.)
        print('patchelf not found: cannot fix rpath of %s\n' % (elf))
        return
    try:
        existing = check_output([patchelf, '--print-rpath', elf]).decode('utf-8').splitlines()[0]
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit).  Failure here typically means the
        # file has no rpath or is not a dynamic ELF; leave it untouched.
        print('patchelf: --print-rpath failed for %s\n' % (elf))
        return
    existing = existing.split(os.pathsep)
    new = []
    for old in existing:
        if old.startswith('$ORIGIN'):
            # Already relative: keep as-is.
            new.append(old)
        elif old.startswith('/'):
            # Test if this absolute path is outside of prefix. That is fatal.
            relpath = os.path.relpath(old, prefix)
            if relpath.startswith('..' + os.sep):
                print('Warning: rpath {0} is outside prefix {1} (removing it)'.format(old, prefix))
            else:
                relpath = '$ORIGIN/' + os.path.relpath(old, origin)
                if relpath not in new:
                    new.append(relpath)
    # Ensure that the asked-for paths are also in new.
    for rpath in rpaths:
        if not rpath.startswith('/'):
            # IMHO utils.relative shouldn't exist, but I am too paranoid to remove
            # it, so instead, make sure that what I think it should be replaced by
            # gives the same result and assert if not. Yeah, I am a chicken.
            rel_ours = os.path.normpath(utils.relative(f, rpath))
            rel_stdlib = os.path.normpath(os.path.relpath(rpath, os.path.dirname(f)))
            assert rel_ours == rel_stdlib, \
                'utils.relative {0} and relpath {1} disagree for {2}, {3}'.format(
                rel_ours, rel_stdlib, f, rpath)
            rpath = '$ORIGIN/' + rel_stdlib
        if rpath not in new:
            new.append(rpath)
    rpath = ':'.join(new)
    print('patchelf: file: %s\n    setting rpath to: %s' % (elf, rpath))
    call([patchelf, '--force-rpath', '--set-rpath', rpath, elf])
コード例 #26
0
ファイル: source.py プロジェクト: bioconda/conda-build
def svn_source(meta, config):
    """Check out the SVN source described by *meta* into config.work_dir.

    Keeps a cached checkout under config.svn_cache, updating or creating
    it as needed, then copies that cache into the work directory.
    """
    # Suppress subprocess output unless verbose mode is on.
    if config.verbose:
        stdout = stderr = None
    else:
        FNULL = open(os.devnull, "w")
        stdout = stderr = FNULL

    def parse_bool(s):
        # Accept the usual truthy spellings from meta.yaml.
        return str(s).lower().strip() in ("yes", "true", "1", "on")

    svn = external.find_executable("svn", config.build_prefix)
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta["svn_url"]
    svn_revision = meta.get("svn_rev") or "head"
    svn_ignore_externals = parse_bool(meta.get("svn_ignore_externals") or "no")
    if not isdir(config.svn_cache):
        os.makedirs(config.svn_cache)
    # Filesystem-safe cache directory name derived from the URL.
    svn_dn = svn_url.split(":", 1)[-1].replace("/", "_").replace(":", "_")
    cache_repo = join(config.svn_cache, svn_dn)
    extra_args = ["--ignore-externals"] if svn_ignore_externals else []
    if isdir(cache_repo):
        # Cache exists: bring it up to the requested revision.
        check_call_env([svn, "up", "-r", svn_revision] + extra_args,
                       cwd=cache_repo, stdout=stdout, stderr=stderr)
    else:
        check_call_env([svn, "co", "-r", svn_revision] + extra_args + [svn_url, cache_repo],
                       stdout=stdout, stderr=stderr)
        assert isdir(cache_repo)

    # now copy into work directory
    copy_into(cache_repo, config.work_dir, config.timeout, symlinks=True)

    if not config.verbose:
        FNULL.close()

    return config.work_dir
コード例 #27
0
ファイル: source.py プロジェクト: patricksnape/conda-build
def hg_source(metadata, config):
    '''Clone a Mercurial source into config.work_dir and check out a tag.

    A pristine clone is cached under config.hg_cache and pulled on each
    call; the work directory is then cloned from that cache.
    '''
    if config.verbose:
        stdout = stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = stderr = FNULL

    meta = metadata.get_section('source')

    hg = external.find_executable('hg', config.build_prefix)
    if not hg:
        sys.exit('Error: hg not installed')
    hg_url = meta['hg_url']
    if not isdir(config.hg_cache):
        os.makedirs(config.hg_cache)
    # Cache directory name derived from the URL path component.
    hg_dn = hg_url.split(':')[-1].replace('/', '_')
    cache_repo = join(config.hg_cache, hg_dn)
    if isdir(cache_repo):
        # Refresh the existing cached clone.
        check_call_env([hg, 'pull'], cwd=cache_repo, stdout=stdout, stderr=stderr)
    else:
        check_call_env([hg, 'clone', hg_url, cache_repo], stdout=stdout, stderr=stderr)
        assert isdir(cache_repo)

    # now clone in to work directory
    update = meta.get('hg_tag') or 'tip'
    if config.verbose:
        print('checkout: %r' % update)

    check_call_env([hg, 'clone', cache_repo, config.work_dir],
                   stdout=stdout, stderr=stderr)
    check_call_env([hg, 'update', '-C', update], cwd=config.work_dir,
                   stdout=stdout, stderr=stderr)

    if not config.verbose:
        FNULL.close()

    return config.work_dir
コード例 #28
0
ファイル: source.py プロジェクト: bioconda/conda-build
def hg_source(meta, config):
    """Clone a Mercurial source into the work directory and update to a tag.

    A cached clone is kept under config.hg_cache and pulled on each call;
    the work directory is then cloned from that cache.
    """
    if config.verbose:
        stdout = stderr = None
    else:
        FNULL = open(os.devnull, "w")
        stdout = stderr = FNULL

    hg = external.find_executable("hg", config.build_prefix)
    if not hg:
        sys.exit("Error: hg not installed")
    hg_url = meta["hg_url"]
    if not isdir(config.hg_cache):
        os.makedirs(config.hg_cache)
    # Cache directory name derived from the URL path component.
    hg_dn = hg_url.split(":")[-1].replace("/", "_")
    cache_repo = join(config.hg_cache, hg_dn)
    if isdir(cache_repo):
        # Refresh the existing cached clone.
        check_call_env([hg, "pull"], cwd=cache_repo, stdout=stdout, stderr=stderr)
    else:
        check_call_env([hg, "clone", hg_url, cache_repo], stdout=stdout, stderr=stderr)
        assert isdir(cache_repo)

    # now clone in to work directory
    update = meta.get("hg_tag") or "tip"
    if config.verbose:
        print("checkout: %r" % update)

    check_call_env([hg, "clone", cache_repo, config.work_dir],
                   stdout=stdout, stderr=stderr)
    check_call_env([hg, "update", "-C", update], cwd=get_dir(config),
                   stdout=stdout, stderr=stderr)

    if not config.verbose:
        FNULL.close()

    return config.work_dir
コード例 #29
0
ファイル: source.py プロジェクト: evhub/conda-build
def apply_patch(src_dir, path, config, git=None):
    '''Apply the patch file *path* inside *src_dir*.

    Git-format patches are applied with ``git am`` (when *git* is given)
    so commit metadata round-trips; anything else falls back to the
    ``patch`` utility.

    :raises SystemExit: if the patch file or the `patch` tool is missing
    '''
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    files, is_git_format = _get_patch_file_details(path)
    if git and is_git_format:
        # Prevents git from asking interactive questions,
        # also necessary to achieve sha1 reproducibility;
        # as is --committer-date-is-author-date. By this,
        # we mean a round-trip of git am/git format-patch
        # gives the same file.
        # Copy the environment: assigning os.environ directly would leak
        # GIT_COMMITTER_* into the whole conda-build process for the rest
        # of its lifetime.
        git_env = os.environ.copy()
        git_env['GIT_COMMITTER_NAME'] = 'conda-build'
        git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
        check_call_env([git, 'am', '--committer-date-is-author-date', path],
                       cwd=src_dir,
                       stdout=None,
                       env=git_env)
        config.git_commits_since_tag += 1
    else:
        print('Applying patch: %r' % path)
        patch = external.find_executable('patch', config.build_prefix)
        if patch is None:
            sys.exit("""\
        Error:
            Cannot use 'git' (not a git repo and/or patch) and did not find 'patch' in: %s
            You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
            or conda, m2-patch (Windows),
        """ % (os.pathsep.join(external.dir_paths)))
        patch_strip_level = _guess_patch_strip_level(files, src_dir)
        patch_args = ['-p%d' % patch_strip_level, '-i', path]
        if sys.platform == 'win32':
            # `patch` needs unix line endings; point it at a converted copy.
            patch_args[-1] = _ensure_unix_line_endings(path)
        check_call_env([patch] + patch_args, cwd=src_dir)
        if sys.platform == 'win32' and os.path.exists(patch_args[-1]):
            os.remove(patch_args[-1])  # clean up .patch_unix file
コード例 #30
0
# (package, skip_text)
cran_os_type_pkgs = [
    ('bigReg', 'skip: True  # [not unix]'),
    ('blatr', 'skip: True  # [not win]'),
]


@pytest.mark.parametrize("package, skip_text", cran_os_type_pkgs)
def test_cran_os_type(package, skip_text, testing_workdir, testing_config):
    """CRAN packages with an OS_type restriction get a matching skip selector."""
    api.skeletonize(packages=package, repo='cran', output_dir=testing_workdir,
                    config=testing_config)
    meta_path = os.path.join(testing_workdir, 'r-' + package.lower(), 'meta.yaml')
    with open(meta_path) as fh:
        assert skip_text in fh.read()


@pytest.mark.slow
@pytest.mark.skipif(not external.find_executable("shellcheck"), reason="requires shellcheck >=0.7.0")
@pytest.mark.parametrize(
    "package, repo", [("r-usethis", "cran"), ("Perl::Lint", "cpan"), ("screen", "rpm")]
)
def test_build_sh_shellcheck_clean(package, repo, testing_workdir, testing_config):
    api.skeletonize(packages=package, repo=repo, output_dir=testing_workdir, config=testing_config)

    matches = []
    for root, dirnames, filenames in os.walk(testing_workdir):
        for filename in fnmatch.filter(filenames, "build.sh"):
            matches.append(os.path.join(root, filename))

    build_sh = matches[0]
    cmd = [
        "shellcheck",
        "--enable=all",
コード例 #31
0
def test_relative_git_url_submodule_clone(testing_workdir, monkeypatch):
    """
    A multi-part test encompassing the following checks:

    1. That git submodules identified with both relative and absolute URLs can be mirrored
       and cloned.

    2. That changes pushed to the original repository are updated in the mirror and finally
       reflected in the package version and filename via `GIT_DESCRIBE_TAG`.

    3. That `source.py` is using `check_call_env` and `check_output_env` and that those
       functions are using tools from the build env.
    """

    # Three repos: a toplevel project plus two submodule sources, one to be
    # referenced by a relative URL and one by an absolute URL.
    toplevel = os.path.join(testing_workdir, 'toplevel')
    os.mkdir(toplevel)
    relative_sub = os.path.join(testing_workdir, 'relative_sub')
    os.mkdir(relative_sub)
    absolute_sub = os.path.join(testing_workdir, 'absolute_sub')
    os.mkdir(absolute_sub)

    # Fixed author/committer identity for the commits created below.
    sys_git_env = os.environ.copy()
    sys_git_env['GIT_AUTHOR_NAME'] = 'conda-build'
    sys_git_env['GIT_AUTHOR_EMAIL'] = '*****@*****.**'
    sys_git_env['GIT_COMMITTER_NAME'] = 'conda-build'
    sys_git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'

    # Find the git executable before putting our dummy one on PATH.
    git = find_executable('git')

    # Put the broken git on os.environ["PATH"]
    exename = dummy_executable(testing_workdir, 'git')
    monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep)
    # .. and ensure it gets run (and fails).
    FNULL = open(os.devnull, 'w')
    # Strangely ..
    #   stderr=FNULL suppresses the output from echo on OS X whereas
    #   stdout=FNULL suppresses the output from echo on Windows
    # NOTE(review): the `message=` keyword to pytest.raises was removed in
    # pytest 5.0 -- confirm the pinned pytest version before upgrading.
    with pytest.raises(subprocess.CalledProcessError,
                       message="Dummy git was not executed"):
        check_call_env([exename, '--version'], stdout=FNULL, stderr=FNULL)
    FNULL.close()

    # Two rounds: tag 0 creates the repos and adds the submodules; tag 1
    # pushes updates that must propagate through the mirror into the
    # rendered package version/filename.
    for tag in range(2):
        os.chdir(absolute_sub)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('absolute', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'absolute'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'absolute{}'.format(tag)],
                       env=sys_git_env)

        os.chdir(relative_sub)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('relative', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'relative'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'relative{}'.format(tag)],
                       env=sys_git_env)

        os.chdir(toplevel)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('toplevel', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'toplevel'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'toplevel{}'.format(tag)],
                       env=sys_git_env)
        if tag == 0:
            # First round: register both submodules in the toplevel repo.
            check_call_env([
                git, 'submodule', 'add',
                convert_path_for_cygwin_or_msys2(git, absolute_sub), 'absolute'
            ],
                           env=sys_git_env)
            check_call_env(
                [git, 'submodule', 'add', '../relative_sub', 'relative'],
                env=sys_git_env)
        else:
            # Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we
            # can change this to `git submodule update --recursive`.
            check_call_env([git, 'submodule', 'foreach', git, 'pull'],
                           env=sys_git_env)
        check_call_env(
            [git, 'commit', '-am', 'added submodules@{}'.format(tag)],
            env=sys_git_env)
        # Annotated tag per round; this is what GIT_DESCRIBE_TAG resolves to.
        check_call_env(
            [git, 'tag', '-a',
             str(tag), '-m', 'tag {}'.format(tag)],
            env=sys_git_env)

        # It is possible to use `Git for Windows` here too, though you *must* not use a different
        # (type of) git than the one used above to add the absolute submodule, because .gitmodules
        # stores the absolute path and that is not interchangeable between MSYS2 and native Win32.
        #
        # Also, git is set to False here because it needs to be rebuilt with the longer prefix. As
        # things stand, my _b_env folder for this test contains more than 80 characters.
        requirements = ('requirements',
                        OrderedDict([('build', [
                            'git            # [False]',
                            'm2-git         # [win]', 'm2-filesystem  # [win]'
                        ])]))

        # Build an in-memory meta.yaml whose version comes from
        # GIT_DESCRIBE_TAG and whose test compares submodule commit
        # summaries against the expected per-tag values.
        filename = os.path.join(testing_workdir, 'meta.yaml')
        data = OrderedDict([
            ('package',
             OrderedDict([('name', 'relative_submodules'),
                          ('version', '{{ GIT_DESCRIBE_TAG }}')])),
            ('source',
             OrderedDict([('git_url', toplevel),
                          ('git_tag', str(tag))])), requirements,
            ('build',
             OrderedDict([('script', [
                 'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > '
                 '%PREFIX%\\summaries.txt  # [win]',
                 'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > '
                 '$PREFIX/summaries.txt   # [not win]'
             ])])),
            ('test',
             OrderedDict([('commands', [
                 'echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt       # [win]'
                 .format(tag, tag),
                 'fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]',
                 'echo absolute{}relative{} > $PREFIX/expected_summaries.txt         # [not win]'
                 .format(tag, tag),
                 'diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]'
             ])]))
        ])

        with open(filename, 'w') as outfile:
            outfile.write(
                yaml.dump(data, default_flow_style=False, width=999999999))
        # Reset the path because our broken, dummy `git` would cause `render_recipe`
        # to fail, while no `git` will cause the build_dependencies to be installed.
        monkeypatch.undo()
        # This will (after one spin round the loop) install and run 'git' with the
        # build env prepended to os.environ[]
        output = api.get_output_file_path(testing_workdir)[0]
        assert ("relative_submodules-{}-".format(tag) in output)
        api.build(testing_workdir)
コード例 #32
0
ファイル: build.py プロジェクト: msarahan/conda-build
def build(m, config, post=None, need_source_download=True, need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param config: build configuration (prefixes, directories, options)
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    :type need_reparse_in_env: bool: re-parse the recipe inside the build env
    :return: True when the package was built and is OK to test, False when
    the build was skipped (recipe skip or already-built package).
    '''

    if m.skip():
        print_skip_message(m)
        return False

    # Honor --skip-existing: bail out early if this exact package exists.
    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(), "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        # ---- Pre-/main build phase: create env, fetch source, run script ----
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print("    (actual version deferred until further download or env creation)")

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        # If the recipe's templates use a VCS (git/hg/svn) make sure the
        # corresponding tool is available in the build env.
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(external.find_executable(vcs_executable,
                                                                config.build_prefix) or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or
                        not any(spec.startswith('python') and "3." in spec
                                for spec in specs)):
                    specs.append(vcs_source)

                    log.warn("Your recipe depends on %s at build time (for templates), "
                            "but you have not listed it as a build dependency.  Doing "
                                "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                    " does not yet support Python 3.  Please handle all of "
                                    "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(m,
                                                                no_download_source=False,
                                                                force_download=True,
                                                                config=config)
            assert not need_source_download, "Source download failed.  Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        # The package being built must not itself be installed in the build
        # env (it would pollute the file diff below) -- remove it.
        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." %
                m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            #    dependening on the source.
            src_dir = config.work_dir
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            # Snapshot of the prefix BEFORE the build runs; the package
            # contents are later computed as the difference against this set.
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        # Remove from the "before" set so it ends up in the diff.
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files", pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        # Inline script from meta.yaml takes precedence over bld.bat.
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(config.work_dir, 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            # Prepend an `activate` line so the build script
                            # runs with the build prefix active.
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source {conda_root}activate {build_prefix} &> "
                                    "/dev/null\n".format(conda_root=root_script_dir + os.path.sep,
                                                         build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file, config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        # ---- Post phase: collect new files, post-process, create tarball ----
        if post:
            # Post-only run: restore the "before" snapshot saved earlier.
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)

        if not is_noarch_python(m):
            create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')),
                     noarch=m.get_value('build/noarch'))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        # New files under conda-meta mean conda ran inside the build script.
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            meta_files = (tuple(f for f in files2 - files1 if config.meta_dir in
                    join(config.build_prefix, f)),)
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m, sorted(files2 - files1),
                    prefix=config.build_prefix,
                    build_python=config.build_python,
                    croot=config.croot)

        entry_point_script_names = get_entry_point_script_names(get_entry_points(config, m))
        if is_noarch_python(m):
            # noarch: python packages exclude the per-platform entry-point scripts.
            pkg_files = [f for f in sorted(files2 - files1) if f not in entry_point_script_names]
        else:
            pkg_files = sorted(files2 - files1)

        create_info_files(m, pkg_files, config=config, prefix=config.build_prefix)

        if m.get_value('build/noarch_python'):
            noarch_python.transform(m, sorted(files2 - files1), config.build_prefix)
        elif is_noarch_python(m):
            noarch_python.populate_files(
                m, pkg_files, config.build_prefix, entry_point_script_names)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config, could_be_mirror=False)

    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
コード例 #33
0

@pytest.mark.parametrize("package, skip_text", cran_os_type_pkgs)
def test_cran_os_type(package, skip_text, testing_workdir, testing_config):
    """Skeletonized CRAN recipes carry the expected OS-type skip selector."""
    api.skeletonize(packages=package, repo='cran',
                    output_dir=testing_workdir, config=testing_config)
    meta_file = os.path.join(testing_workdir, 'r-' + package.lower(), 'meta.yaml')
    with open(meta_file) as fh:
        content = fh.read()
    assert skip_text in content


@pytest.mark.slow
@pytest.mark.flaky(max_runs=5)
@pytest.mark.skipif(not external.find_executable("shellcheck"),
                    reason="requires shellcheck >=0.7.0")
@pytest.mark.parametrize("package, repo", [("r-rmarkdown", "cran"),
                                           ("Perl::Lint", "cpan"),
                                           ("screen", "rpm")])
def test_build_sh_shellcheck_clean(package, repo, testing_workdir,
                                   testing_config):
    """Skeletonize a package and collect its generated build.sh scripts."""
    api.skeletonize(packages=package,
                    repo=repo,
                    output_dir=testing_workdir,
                    config=testing_config)

    # Gather every build.sh produced anywhere under the work dir.
    matches = [os.path.join(root, fname)
               for root, dirnames, filenames in os.walk(testing_workdir)
               for fname in fnmatch.filter(filenames, "build.sh")]
0
ファイル: source.py プロジェクト: mingwandroid/conda-build
def apply_patch(src_dir, path, config, git=None):
    """Apply the patch file at ``path`` to the source tree ``src_dir``.

    Git-format patches are applied with ``git am`` (when ``git`` is supplied)
    so that commit sha1s stay reproducible; anything else falls back to the
    ``patch`` executable, retrying on Windows with the patch file converted
    first to LF and then to CRLF line endings, because patch/CRLF
    interactions are unreliable there.

    :param src_dir: directory in which the patch is applied
    :param path: path to the patch file
    :param config: conda-build config; ``git_commits_since_tag`` is bumped
        when the patch is applied as a git commit
    :param git: path to a git executable, or None to force plain ``patch``
    :raises SystemExit: if the patch file or a ``patch`` executable is missing
    """
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    files, is_git_format = _get_patch_file_details(path)
    if git and is_git_format:
        # Prevents git from asking interactive questions,
        # also necessary to achieve sha1 reproducibility;
        # as is --committer-date-is-author-date. By this,
        # we mean a round-trip of git am/git format-patch
        # gives the same file.
        # Use a copy so the committer identity does not leak into this
        # process's own environment.
        git_env = os.environ.copy()
        git_env['GIT_COMMITTER_NAME'] = 'conda-build'
        git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
        check_call_env([git, 'am', '--committer-date-is-author-date', path],
                       cwd=src_dir, stdout=None, env=git_env)
        config.git_commits_since_tag += 1
    else:
        print('Applying patch: %r' % path)
        patch = external.find_executable('patch', config.build_prefix)
        if patch is None:
            sys.exit("""\
        Error:
            Cannot use 'git' (not a git repo and/or patch) and did not find 'patch' in: %s
            You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
            or conda, m2-patch (Windows),
        """ % (os.pathsep.join(external.dir_paths)))
        patch_strip_level = _guess_patch_strip_level(files, src_dir)
        patch_args = ['-p%d' % patch_strip_level, '--ignore-whitespace', '-i', path]

        # line endings are a pain.
        # https://unix.stackexchange.com/a/243748/34459

        log = get_logger(__name__)
        try:
            log.info("Trying to apply patch as-is")
            check_call_env([patch] + patch_args, cwd=src_dir)
        except CalledProcessError:
            if sys.platform == 'win32':
                unix_ending_file = _ensure_unix_line_endings(path)
                patch_args[-1] = unix_ending_file
                try:
                    log.info("Applying unmodified patch failed.  "
                             "Convert to unix line endings and trying again.")
                    check_call_env([patch] + patch_args, cwd=src_dir)
                except Exception:
                    log.info("Applying unix patch failed.  "
                             "Convert to CRLF line endings and trying again with --binary.")
                    patch_args.insert(0, '--binary')
                    win_ending_file = _ensure_win_line_endings(path)
                    patch_args[-1] = win_ending_file
                    try:
                        check_call_env([patch] + patch_args, cwd=src_dir)
                    finally:
                        if os.path.exists(win_ending_file):
                            os.remove(win_ending_file)  # clean up .patch_win file
                finally:
                    if os.path.exists(unix_ending_file):
                        os.remove(unix_ending_file)  # clean up .patch_unix file
            else:
                raise
コード例 #35
0
ファイル: environ.py プロジェクト: t-bltg/conda-build
def meta_vars(meta, skip_build_id=False):
    """Collect the environment variables conda-build exposes to build/test
    scripts: pass-through ``build/script_env`` entries, git/hg source
    metadata, and the PKG_* / RECIPE_DIR values.

    :param meta: MetaData object for the recipe being built
    :param skip_build_id: when True, emit placeholder PKG_BUILD_STRING and
        PKG_HASH instead of the final values
    :return: dict mapping variable names to string values
    """
    d = {}
    for var_name in ensure_list(meta.get_value('build/script_env', [])):
        if '=' in var_name:
            # "NAME=VALUE" entries carry the value inline.  Split on the
            # FIRST '=' only so a value that itself contains '=' (e.g.
            # "FOO=a=b") is passed through unmangled; splitting on every
            # '=' and taking the last piece would lose "a=".
            var_name, value = var_name.split('=', 1)
        else:
            value = os.getenv(var_name)
        if value is None:
            warnings.warn(
                "The environment variable '%s' is undefined." % var_name,
                UserWarning)
        else:
            d[var_name] = value
            warnings.warn(
                "The environment variable '%s' is being passed through with value '%s'.  "
                "If you are splitting build and test phases with --no-test, please ensure "
                "that this value is also set similarly at test time." %
                (var_name,
                 "<hidden>" if meta.config.suppress_variables else value),
                UserWarning)

    folder = meta.get_value('source/0/folder', '')
    repo_dir = join(meta.config.work_dir, folder)
    git_dir = join(repo_dir, '.git')
    hg_dir = join(repo_dir, '.hg')

    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8')

    git_exe = external.find_executable('git', meta.config.build_prefix)
    if git_exe and os.path.exists(git_dir):
        # We set all 'source' metavars using the FIRST source entry in meta.yaml.
        git_url = meta.get_value('source/0/git_url')

        if os.path.exists(git_url):
            if sys.platform == 'win32':
                git_url = utils.convert_unix_path_to_win(git_url)
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))

        _x = False

        if git_url:
            _x = verify_git_repo(git_exe, git_dir, git_url,
                                 meta.config.git_commits_since_tag,
                                 meta.config.debug,
                                 meta.get_value('source/0/git_rev', 'HEAD'))

        if _x or meta.get_value('source/0/path'):
            d.update(get_git_info(git_exe, git_dir, meta.config.debug))

    elif external.find_executable(
            'hg', meta.config.build_prefix) and os.path.exists(hg_dir):
        d.update(get_hg_build_info(hg_dir))

    # use `get_value` to prevent early exit while name is still unresolved during rendering
    d['PKG_NAME'] = meta.get_value('package/name')
    d['PKG_VERSION'] = meta.version()
    d['PKG_BUILDNUM'] = str(meta.build_number() or 0)
    if meta.final and not skip_build_id:
        d['PKG_BUILD_STRING'] = str(meta.build_id())
        d['PKG_HASH'] = meta.hash_dependencies()
    else:
        d['PKG_BUILD_STRING'] = 'placeholder'
        d['PKG_HASH'] = '1234567'
    d['RECIPE_DIR'] = meta.path
    return d
コード例 #36
0
ファイル: source.py プロジェクト: jwillemsen/conda-build
def apply_patch(src_dir, path, config, git=None):
    """Apply the patch file at ``path`` inside ``src_dir``.

    Git-format patches are applied with ``git am -3`` (when ``git`` is
    supplied) so commit sha1s stay reproducible.  Everything else goes
    through the ``patch`` executable via a ladder of increasingly permissive
    attempts: native (--binary), then --ignore-whitespace, then with the
    patch converted to LF line endings, and finally to CRLF.  The first
    failure is saved and re-raised only if every attempt fails.
    """
    def try_apply_patch(patch, patch_args, cwd, stdout, stderr):
        """Dry-run the patch first; apply for real only if the dry-run passes."""
        # An old reference: https://unix.stackexchange.com/a/243748/34459
        #
        # I am worried that '--ignore-whitespace' may be destructive. If so we should
        # avoid passing it, particularly in the initial (most likely to succeed) calls.
        #
        # From here-in I define a 'native' patch as one which has:
        # 1. LF for the patch block metadata.
        # 2. CRLF or LF for the actual patched lines matching those of the source lines.
        #
        # Calls to a raw 'patch' are destructive in various ways:
        # 1. It leaves behind .rej and .orig files
        # 2. If you pass it a patch with incorrect CRLF changes and do not pass --binary and
        #    if any of those blocks *can* be applied, then the whole file gets written out with
        #    LF.  This cannot be reversed either; the text changes will be reversed but not
        #    line-feed changes (since all line-endings get changed, not just those of the
        #    patched lines)
        # 3. If patching fails, the bits that succeeded remain, so patching is not at all
        #    atomic.
        #
        # Still, we do our best to mitigate all of this as follows:
        # 1. We use --dry-run to test for applicability first.
        # 2 We check for native application of a native patch (--binary, without --ignore-whitespace)
        #
        # Some may bemoan the loss of patch failure artifacts, but it is fairly random which
        # patch and patch attempt they apply to so their informational value is low, besides that,
        # they are ugly.
        #
        import tempfile
        # Scratch path handed to patch's -r (reject file) option.
        # NOTE(review): tempfile._get_candidate_names() is a private stdlib
        # API — consider NamedTemporaryFile(delete=False) instead.
        temp_name = os.path.join(tempfile.gettempdir(),
                                 next(tempfile._get_candidate_names()))
        base_patch_args = ['--no-backup-if-mismatch', '--batch'
                           ] + patch_args + ['-r', temp_name]
        log = get_logger(__name__)
        try:
            try_patch_args = base_patch_args[:]
            try_patch_args.append('--dry-run')
            log.debug("dry-run applying with\n{} {}".format(
                patch, try_patch_args))
            check_call_env([patch] + try_patch_args,
                           cwd=cwd,
                           stdout=stdout,
                           stderr=stderr)
            # You can use this to pretend the patch failed so as to test reversal!
            # raise CalledProcessError(-1, ' '.join([patch] + patch_args))
        except Exception as e:
            raise e
        else:
            # Dry-run succeeded: apply for real (without --dry-run).
            check_call_env([patch] + base_patch_args,
                           cwd=cwd,
                           stdout=stdout,
                           stderr=stderr)
        finally:
            # Remove the reject file regardless of outcome.
            if os.path.exists(temp_name):
                os.unlink(temp_name)

    exception = None  # first failure, re-raised at the end if nothing worked
    if not isfile(path):
        raise RuntimeError('Error: no such patch: %s' % path)

    if config.verbose:
        stdout = None
        stderr = None
    else:
        # NOTE(review): FNULL is opened but never closed on any path here.
        FNULL = open(os.devnull, 'w')
        stdout = FNULL
        stderr = FNULL

    files, is_git_format = _get_patch_file_details(path)
    if git and is_git_format:
        # Prevents git from asking interactive questions,
        # also necessary to achieve sha1 reproducibility;
        # as is --committer-date-is-author-date. By this,
        # we mean a round-trip of git am/git format-patch
        # gives the same file.
        # NOTE(review): this binds (does not copy) os.environ, so the
        # committer identity below leaks into this process's environment —
        # confirm that is intended.
        git_env = os.environ
        git_env['GIT_COMMITTER_NAME'] = 'conda-build'
        git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
        check_call_env(
            [git, 'am', '-3', '--committer-date-is-author-date', path],
            cwd=src_dir,
            stdout=stdout,
            stderr=stderr,
            env=git_env)
        config.git_commits_since_tag += 1
    else:
        if config.verbose:
            print('Applying patch: %r' % path)
        patch = external.find_executable('patch', config.build_prefix)
        if patch is None or len(patch) == 0:
            sys.exit("""\
        Error:
            Cannot use 'git' (not a git repo and/or patch) and did not find 'patch' in: %s
            You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
            or conda, m2-patch (Windows),
        """ % (os.pathsep.join(external.dir_paths)))
        patch_strip_level = _guess_patch_strip_level(files, src_dir)
        path_args = ['-i', path]
        patch_args = ['-p%d' % patch_strip_level]

        try:
            log = get_logger(__name__)
            # This is the case we check first of all as it is the case that allows a properly line-ended
            # patch to apply correctly to a properly line-ended source tree, modifying it following the
            # patch chunks exactly.
            try_apply_patch(patch,
                            patch_args + ['--binary'] + path_args,
                            cwd=src_dir,
                            stdout=stdout,
                            stderr=stderr)
        except CalledProcessError as e:
            # Capture the first exception
            exception = e
            if config.verbose:
                log.info(
                    "Applying patch natively failed.  "
                    "Trying to apply patch non-binary with --ignore-whitespace"
                )
            try:
                try_apply_patch(patch,
                                patch_args + ['--ignore-whitespace'] +
                                path_args,
                                cwd=src_dir,
                                stdout=stdout,
                                stderr=stderr)
            except CalledProcessError as e:  # noqa
                unix_ending_file = _ensure_unix_line_endings(path)
                path_args[-1] = unix_ending_file
                try:
                    if config.verbose:
                        log.info(
                            "Applying natively *and* non-binary failed!  "
                            "Converting to unix line endings and trying again.  "
                            "WARNING :: This is destructive to the source file line-endings."
                        )
                    # If this succeeds, it will change the source files' CRLFs to LFs. This can
                    # mess things up both for subsequent attempts (this line-ending change is not
                    # reversible) but worse, for subsequent, correctly crafted (I'm calling these
                    # "native" from now on) patches.
                    try_apply_patch(patch,
                                    patch_args + ['--ignore-whitespace'] +
                                    path_args,
                                    cwd=src_dir,
                                    stdout=stdout,
                                    stderr=stderr)
                except CalledProcessError:
                    if config.verbose:
                        log.warning(
                            "Applying natively, non-binary *and* unix attempts all failed!?  "
                            "Converting to CRLF line endings and trying again with "
                            "--ignore-whitespace and --binary. This can be destructive (even"
                            "with attempted reversal) to the source files' line-endings."
                        )
                    win_ending_file = _ensure_win_line_endings(path)
                    path_args[-1] = win_ending_file
                    try:
                        try_apply_patch(patch,
                                        patch_args +
                                        ['--ignore-whitespace', '--binary'] +
                                        path_args,
                                        cwd=src_dir,
                                        stdout=stdout,
                                        stderr=stderr)
                    except:
                        # Last resort failed; the captured first exception is
                        # raised below.
                        pass
                    else:
                        exception = None
                    finally:
                        if os.path.exists(win_ending_file):
                            os.remove(
                                win_ending_file)  # clean up .patch_win file
                else:
                    exception = None
                finally:
                    if os.path.exists(unix_ending_file):
                        os.remove(unix_ending_file)
    if exception:
        raise exception
コード例 #37
0
def apply_one_patch(src_dir, recipe_dir, rel_path, config, git=None):
    """Apply the single recipe patch ``rel_path`` (relative to ``recipe_dir``)
    to the source tree ``src_dir``.

    Git-format patches are applied with ``git am -3`` (when ``git`` is
    supplied) for sha1 reproducibility; otherwise the ``patch`` executable is
    used with the arguments computed by ``_get_patch_attributes``.

    :return: the human-readable patch-attributes debug string
    :raises RuntimeError: if the patch file or a ``patch`` executable is missing
    """
    path = os.path.join(recipe_dir, rel_path)
    if config.verbose:
        print('Applying patch: {}'.format(path))

    def try_apply_patch(patch, patch_args, cwd, stdout, stderr):
        """Dry-run the patch first; apply for real only if the dry-run passes."""
        # An old reference: https://unix.stackexchange.com/a/243748/34459
        #
        # I am worried that '--ignore-whitespace' may be destructive. If so we should
        # avoid passing it, particularly in the initial (most likely to succeed) calls.
        #
        # From here-in I define a 'native' patch as one which has:
        # 1. LF for the patch block metadata.
        # 2. CRLF or LF for the actual patched lines matching those of the source lines.
        #
        # Calls to a raw 'patch' are destructive in various ways:
        # 1. It leaves behind .rej and .orig files
        # 2. If you pass it a patch with incorrect CRLF changes and do not pass --binary and
        #    if any of those blocks *can* be applied, then the whole file gets written out with
        #    LF.  This cannot be reversed either; the text changes will be reversed but not
        #    line-feed changes (since all line-endings get changed, not just those of the
        #    patched lines)
        # 3. If patching fails, the bits that succeeded remain, so patching is not at all
        #    atomic.
        #
        # Still, we do our best to mitigate all of this as follows:
        # 1. We use --dry-run to test for applicability first.
        # 2 We check for native application of a native patch (--binary, without --ignore-whitespace)
        #
        # Some may bemoan the loss of patch failure artifacts, but it is fairly random which
        # patch and patch attempt they apply to so their informational value is low, besides that,
        # they are ugly.
        #
        import tempfile
        temp_name = os.path.join(tempfile.gettempdir(),
                                 next(tempfile._get_candidate_names()))
        # NOTE(review): unlike the sibling apply_patch, '-r', temp_name is
        # not appended here, so patch never writes the reject file that the
        # finally block tries to remove — confirm whether '-r' was dropped
        # intentionally.
        base_patch_args = ['--no-backup-if-mismatch', '--batch'] + patch_args
        try:
            try_patch_args = base_patch_args[:]
            try_patch_args.append('--dry-run')
            # NOTE(review): `log` is not defined in this scope; presumably a
            # module-level logger — verify.
            log.debug("dry-run applying with\n{} {}".format(
                patch, try_patch_args))
            check_call_env([patch] + try_patch_args,
                           cwd=cwd,
                           stdout=stdout,
                           stderr=stderr)
            # You can use this to pretend the patch failed so as to test reversal!
            # raise CalledProcessError(-1, ' '.join([patch] + patch_args))
        except Exception as e:
            raise e
        else:
            # Dry-run succeeded: apply for real.
            check_call_env([patch] + base_patch_args,
                           cwd=cwd,
                           stdout=stdout,
                           stderr=stderr)
        finally:
            if os.path.exists(temp_name):
                os.unlink(temp_name)

    exception = None
    if not isfile(path):
        raise RuntimeError('Error: no such patch: %s' % path)

    if config.verbose:
        stdout = None
        stderr = None
    else:
        FNULL = open(os.devnull, 'wb')
        stdout = FNULL
        stderr = FNULL

    attributes_output = ""
    # find_executable can return None; `not patch_exe` covers both None and
    # '' — the previous `not len(patch_exe)` raised TypeError on None.
    patch_exe = external.find_executable('patch', config.build_prefix)
    if not patch_exe:
        patch_exe = external.find_executable('patch', config.host_prefix)
        if not patch_exe:
            patch_exe = ''
    with TemporaryDirectory() as tmpdir:
        patch_attributes = _get_patch_attributes(path, patch_exe, git, src_dir,
                                                 stdout, stderr, tmpdir)
        attributes_output += _patch_attributes_debug(patch_attributes,
                                                     rel_path,
                                                     config.build_prefix)
        if git and patch_attributes['format'] == 'git':
            # Prevents git from asking interactive questions,
            # also necessary to achieve sha1 reproducibility;
            # as is --committer-date-is-author-date. By this,
            # we mean a round-trip of git am/git format-patch
            # gives the same file.
            # Use a copy so the committer identity does not leak into this
            # process's own environment.
            git_env = os.environ.copy()
            git_env['GIT_COMMITTER_NAME'] = 'conda-build'
            git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
            check_call_env(
                [git, 'am', '-3', '--committer-date-is-author-date', path],
                cwd=src_dir,
                stdout=stdout,
                stderr=stderr,
                env=git_env)
            config.git_commits_since_tag += 1
        else:
            if not patch_exe:
                errstr = ("""\
            Error:
                Cannot use 'git' (not a git repo and/or patch) and did not find 'patch' in: %s
                You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
                or conda, m2-patch (Windows),
            """ % (os.pathsep.join(external.dir_paths)))
                raise RuntimeError(errstr)
            patch_args = patch_attributes['args']

            if config.verbose:
                print('Applying patch: {} with args:\n{}'.format(
                    path, patch_args))

            try:
                try_apply_patch(patch_exe,
                                patch_args,
                                cwd=src_dir,
                                stdout=stdout,
                                stderr=stderr)
            except Exception as e:
                exception = e
        if exception:
            raise exception
    return attributes_output
コード例 #38
0
ファイル: environ.py プロジェクト: cav71/conda-build
def meta_vars(meta, config, skip_build_id=False):
    """Assemble the environment variables conda-build exposes to build
    scripts: pass-through script_env entries, git/hg source metadata, and
    the PKG_* / RECIPE_DIR values.

    :param meta: MetaData object for the recipe being built
    :param config: conda-build Config
    :param skip_build_id: when True, emit placeholder PKG_BUILD_STRING and
        PKG_HASH instead of the final values
    :return: dict mapping variable names to string values
    """
    env = {}
    for name in ensure_list(meta.get_value('build/script_env', [])):
        val = os.getenv(name)
        if val is None:
            warnings.warn(
                "The environment variable '%s' is undefined." % name,
                UserWarning
            )
            continue
        env[name] = val
        warnings.warn(
            "The environment variable '%s' is being passed through with value %s.  "
            "If you are splitting build and test phases with --no-test, please ensure "
            "that this value is also set similarly at test time." % (name, val),
            UserWarning
        )

    repo_dir = join(config.work_dir, meta.get_value('source/0/folder', ''))
    git_dir = join(repo_dir, '.git')
    hg_dir = join(repo_dir, '.hg')

    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8')

    git_exe = external.find_executable('git', config.build_prefix)
    if git_exe and os.path.exists(git_dir):
        # All 'source' metavars come from the FIRST source entry in meta.yaml.
        git_url = meta.get_value('source/0/git_url')
        if os.path.exists(git_url):
            if sys.platform == 'win32':
                git_url = utils.convert_unix_path_to_win(git_url)
            # A relative-path git_url is resolved against the recipe directory.
            git_url = normpath(join(meta.path, git_url))

        repo_ok = False
        if git_url:
            repo_ok = verify_git_repo(git_exe,
                                      git_dir,
                                      git_url,
                                      config.git_commits_since_tag,
                                      config.debug,
                                      meta.get_value('source/0/git_rev', 'HEAD'))
        if repo_ok or meta.get_value('source/0/path'):
            env.update(get_git_info(git_exe, git_dir, config.debug))
    elif external.find_executable('hg', config.build_prefix) and os.path.exists(hg_dir):
        env.update(get_hg_build_info(hg_dir))

    # `get_value` avoids an early exit while the name is still unresolved during rendering.
    env['PKG_NAME'] = meta.get_value('package/name')
    env['PKG_VERSION'] = meta.version()
    env['PKG_BUILDNUM'] = str(meta.build_number() or 0)
    if meta.final and not skip_build_id:
        env['PKG_BUILD_STRING'] = str(meta.build_id())
        env['PKG_HASH'] = meta.hash_dependencies()
    else:
        env['PKG_BUILD_STRING'] = 'placeholder'
        env['PKG_HASH'] = '1234567'
    env['RECIPE_DIR'] = (meta.path if meta.path else
                         meta.meta.get('extra', {}).get('parent_recipe', {}).get('path', ''))
    return env
コード例 #39
0
def apply_patch(src_dir, path, config, git=None):
    """Apply the patch file at ``path`` to the source tree ``src_dir``.

    Git-format patches are applied with ``git am`` (when ``git`` is supplied)
    so that commit sha1s stay reproducible; anything else falls back to the
    ``patch`` executable, retrying on Windows with the patch file converted
    first to LF and then to CRLF line endings.

    :param src_dir: directory in which the patch is applied
    :param path: path to the patch file
    :param config: conda-build config; ``git_commits_since_tag`` is bumped
        when the patch is applied as a git commit
    :param git: path to a git executable, or None to force plain ``patch``
    :raises SystemExit: if the patch file or a ``patch`` executable is missing
    """
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    files, is_git_format = _get_patch_file_details(path)
    if git and is_git_format:
        # Prevents git from asking interactive questions,
        # also necessary to achieve sha1 reproducibility;
        # as is --committer-date-is-author-date. By this,
        # we mean a round-trip of git am/git format-patch
        # gives the same file.
        # Use a copy so the committer identity does not leak into this
        # process's own environment.
        git_env = os.environ.copy()
        git_env['GIT_COMMITTER_NAME'] = 'conda-build'
        git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'
        check_call_env([git, 'am', '--committer-date-is-author-date', path],
                       cwd=src_dir, stdout=None, env=git_env)
        config.git_commits_since_tag += 1
    else:
        print('Applying patch: %r' % path)
        patch = external.find_executable('patch', config.build_prefix)
        if patch is None:
            sys.exit("""\
        Error:
            Cannot use 'git' (not a git repo and/or patch) and did not find 'patch' in: %s
            You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
            or conda, m2-patch (Windows),
        """ % (os.pathsep.join(external.dir_paths)))
        patch_strip_level = _guess_patch_strip_level(files, src_dir)
        patch_args = ['-p%d' % patch_strip_level, '-i', path]

        # line endings are a pain.
        # https://unix.stackexchange.com/a/243748/34459

        log = get_logger(__name__)
        try:
            log.info("Trying to apply patch as-is")
            check_call_env([patch] + patch_args, cwd=src_dir)
        except CalledProcessError:
            if sys.platform == 'win32':
                unix_ending_file = _ensure_unix_line_endings(path)
                patch_args[-1] = unix_ending_file
                try:
                    log.info("Applying unmodified patch failed.  "
                             "Convert to unix line endings and trying again.")
                    check_call_env([patch] + patch_args, cwd=src_dir)
                except Exception:
                    log.info("Applying unix patch failed.  "
                             "Convert to CRLF line endings and trying again with --binary.")
                    patch_args.insert(0, '--binary')
                    win_ending_file = _ensure_win_line_endings(path)
                    patch_args[-1] = win_ending_file
                    try:
                        check_call_env([patch] + patch_args, cwd=src_dir)
                    finally:
                        if os.path.exists(win_ending_file):
                            os.remove(win_ending_file)  # clean up .patch_win file
                finally:
                    if os.path.exists(unix_ending_file):
                        os.remove(unix_ending_file)  # clean up .patch_unix file
            else:
                raise
コード例 #40
0
def test_relative_git_url_submodule_clone(testing_workdir, monkeypatch):
    """
    A multi-part test encompassing the following checks:

    1. That git submodules identified with both relative and absolute URLs can be mirrored
       and cloned.

    2. That changes pushed to the original repository are updated in the mirror and finally
       reflected in the package version and filename via `GIT_DESCRIBE_TAG`.

    3. That `source.py` is using `check_call_env` and `check_output_env` and that those
       functions are using tools from the build env.
    """

    toplevel = os.path.join(testing_workdir, 'toplevel')
    os.mkdir(toplevel)
    relative_sub = os.path.join(testing_workdir, 'relative_sub')
    os.mkdir(relative_sub)
    absolute_sub = os.path.join(testing_workdir, 'absolute_sub')
    os.mkdir(absolute_sub)

    sys_git_env = os.environ.copy()
    sys_git_env['GIT_AUTHOR_NAME'] = 'conda-build'
    sys_git_env['GIT_AUTHOR_EMAIL'] = '*****@*****.**'
    sys_git_env['GIT_COMMITTER_NAME'] = 'conda-build'
    sys_git_env['GIT_COMMITTER_EMAIL'] = '*****@*****.**'

    # Find the git executable before putting our dummy one on PATH.
    git = find_executable('git')

    # Put the broken git on os.environ["PATH"]
    exename = dummy_executable(testing_workdir, 'git')
    monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep)
    # .. and ensure it gets run (and fails).
    FNULL = open(os.devnull, 'w')
    # Strangely ..
    #   stderr=FNULL suppresses the output from echo on OS X whereas
    #   stdout=FNULL suppresses the output from echo on Windows
    # pytest.raises' `message` parameter was removed in pytest 4.0 (it never
    # checked the exception text anyway — it was only the failure message
    # shown when no exception was raised); the bare context manager already
    # fails the test if the dummy git does not raise CalledProcessError.
    with pytest.raises(subprocess.CalledProcessError):
        check_call_env([exename, '--version'], stdout=FNULL, stderr=FNULL)
    FNULL.close()

    for tag in range(2):
        os.chdir(absolute_sub)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('absolute', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'absolute'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'absolute{}'.format(tag)],
                                env=sys_git_env)

        os.chdir(relative_sub)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('relative', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'relative'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'relative{}'.format(tag)],
                                env=sys_git_env)

        os.chdir(toplevel)
        if tag == 0:
            check_call_env([git, 'init'], env=sys_git_env)
        with open('toplevel', 'w') as f:
            f.write(str(tag))
        check_call_env([git, 'add', 'toplevel'], env=sys_git_env)
        check_call_env([git, 'commit', '-m', 'toplevel{}'.format(tag)],
                                env=sys_git_env)
        if tag == 0:
            check_call_env([git, 'submodule', 'add',
                            convert_path_for_cygwin_or_msys2(git, absolute_sub), 'absolute'],
                           env=sys_git_env)
            check_call_env([git, 'submodule', 'add', '../relative_sub', 'relative'],
                           env=sys_git_env)
        else:
            # Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we
            # can change this to `git submodule update --recursive`.
            check_call_env([git, 'submodule', 'foreach', git, 'pull'], env=sys_git_env)
        check_call_env([git, 'commit', '-am', 'added submodules@{}'.format(tag)],
                              env=sys_git_env)
        check_call_env([git, 'tag', '-a', str(tag), '-m', 'tag {}'.format(tag)],
                                env=sys_git_env)

        # It is possible to use `Git for Windows` here too, though you *must* not use a different
        # (type of) git than the one used above to add the absolute submodule, because .gitmodules
        # stores the absolute path and that is not interchangeable between MSYS2 and native Win32.
        #
        # Also, git is set to False here because it needs to be rebuilt with the longer prefix. As
        # things stand, my _b_env folder for this test contains more than 80 characters.
        requirements = ('requirements', OrderedDict([
                        ('build',
                         ['git            # [False]',
                          'm2-git         # [win]',
                          'm2-filesystem  # [win]'])]))

        filename = os.path.join(testing_workdir, 'meta.yaml')
        data = OrderedDict([
            ('package', OrderedDict([
                ('name', 'relative_submodules'),
                ('version', '{{ GIT_DESCRIBE_TAG }}')])),
            ('source', OrderedDict([
                ('git_url', toplevel),
                ('git_tag', str(tag))])),
            requirements,
            ('build', OrderedDict([
                ('script',
                 ['git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > '
                       '%PREFIX%\\summaries.txt  # [win]',
                  'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > '
                       '$PREFIX/summaries.txt   # [not win]'])
            ])),
            ('test', OrderedDict([
                ('commands',
                 ['echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt       # [win]'
                      .format(tag, tag),
                  'fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]',
                  'echo absolute{}relative{} > $PREFIX/expected_summaries.txt         # [not win]'
                      .format(tag, tag),
                  'diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]'])
            ]))
        ])

        with open(filename, 'w') as outfile:
            outfile.write(yaml.dump(data, default_flow_style=False, width=999999999))
        # Reset the path because our broken, dummy `git` would cause `render_recipe`
        # to fail, while no `git` will cause the build_dependencies to be installed.
        monkeypatch.undo()
        # This will (after one spin round the loop) install and run 'git' with the
        # build env prepended to os.environ[]
        output = api.get_output_file_path(testing_workdir)[0]
        assert ("relative_submodules-{}-".format(tag) in output)
        api.build(testing_workdir)
コード例 #41
0
def build(m,
          config,
          post=None,
          need_source_download=True,
          need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param config: build Config object (prefixes, croot, timeout, flags)
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    :type need_reparse_in_env: bool: re-parse the recipe with the build
    environment active (when jinja templating needs tools from that env)
    :return: True if the package was built and is OK to test; False if the
    build was skipped (recipe skip, or package already built).
    '''

    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(),
                  "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print(
                "    (actual version deferred until further download or env creation)"
            )

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(
                external.find_executable(vcs_executable, config.build_prefix)
                or "")
            if not has_vcs_available:
                # The recipe templates need a VCS tool that is neither on PATH
                # nor listed as a build dependency; add it and rebuild the env.
                if (vcs_source != "mercurial" or not any(
                        spec.startswith('python') and "3." in spec
                        for spec in specs)):
                    specs.append(vcs_source)

                    log.warn(
                        "Your recipe depends on %s at build time (for templates), "
                        "but you have not listed it as a build dependency.  Doing "
                        "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError(
                        "Your recipe uses mercurial in build, but mercurial"
                        " does not yet support Python 3.  Please handle all of "
                        "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    config=config)
            assert not need_source_download, "Source download failed.  Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        # If the package being built is itself installed into the build env as
        # a dependency, remove it so its files aren't mistaken for new files.
        if m.name() in [
                i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
        ]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            #    depending on the source.
            src_dir = source.get_dir(config)
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            # Snapshot the prefix before the build so that new files can be
            # identified afterwards (files2 - files1 below).
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn(
                        "Glob %s from always_include_files does not match any files",
                        pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(source.get_dir(config),
                                         'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            # Prepend an `activate` call so the script runs
                            # with the build env active.
                            if isfile(build_file):
                                with open(build_file) as src:
                                    data = src.read()
                            else:
                                with open(work_file) as src:
                                    data = src.read()
                            with open(work_file, 'w') as bf:
                                bf.write(
                                    "source {conda_root}activate {build_prefix} &> "
                                    "/dev/null\n".format(
                                        conda_root=root_script_dir +
                                        os.path.sep,
                                        build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file,
                                          config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            # post-only run: recover the pre-build file snapshot from disk
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        # New files under conda-meta mean conda itself was run in the build
        # script, which would bundle dependency metadata into the package.
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            meta_files = (tuple(
                f for f in files2 - files1
                if config.meta_dir in join(config.build_prefix, f)), )
            sys.exit(
                indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m,
                   sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)
        create_info_files(m,
                          sorted(files2 - files1),
                          config=config,
                          prefix=config.build_prefix)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1),
                                    config.build_prefix)

        # Re-snapshot: info-file creation / noarch transform may add files.
        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config)

    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True