Ejemplo n.º 1
0
def tar_xf(tarball, dir_path, mode="r:*"):
    """Extract *tarball* into *dir_path*.

    ``.tar.Z`` archives are first decompressed in place with the external
    ``uncompress`` (or ``gunzip``) tool.  On Python 2 (``not PY3``),
    ``.tar.xz`` archives are decompressed with the external ``unxz`` first,
    since the old tarfile module cannot read LZMA streams.
    """
    if tarball.lower().endswith(".tar.z"):
        uncompress = external.find_executable("uncompress")
        if not uncompress:
            # gunzip can also decompress .Z files, so use it as a fallback
            uncompress = external.find_executable("gunzip")
        if not uncompress:
            sys.exit(
                """\
uncompress (or gunzip) is required to unarchive .z source files.
"""
            )
        subprocess.check_call([uncompress, "-f", tarball])
        tarball = tarball[:-2]  # drop the trailing ".z"/".Z"
    if not PY3 and tarball.endswith(".tar.xz"):
        unxz = external.find_executable("unxz")
        if not unxz:
            sys.exit(
                """\
unxz is required to unarchive .xz source files.
"""
            )

        subprocess.check_call([unxz, "-f", "-k", tarball])
        tarball = tarball[:-3]  # drop the trailing ".xz"
    # NOTE(review): extractall on an untrusted archive can write outside
    # dir_path (path traversal) — confirm tarballs come from trusted sources.
    t = tarfile.open(tarball, mode)
    t.extractall(path=dir_path)
    t.close()
Ejemplo n.º 2
0
def svn_source(meta):
    ''' Download a source from SVN repo. '''
    def parse_bool(s):
        # Accept the usual truthy spellings from recipe metadata.
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn = external.find_executable('svn')
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta['svn_url']
    svn_revision = meta.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no')
    if not isdir(SVN_CACHE):
        os.makedirs(SVN_CACHE)
    # Filesystem-safe cache directory name derived from the URL.
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(SVN_CACHE, svn_dn)
    if svn_ignore_externals:
        extra_args = ['--ignore-externals']
    else:
        extra_args = []
    # Update an existing cached checkout, or create it on first use.
    if isdir(cache_repo):
        check_call([svn, 'up', '-r', svn_revision] + extra_args,
                   cwd=cache_repo)
    else:
        check_call([svn, 'co', '-r', svn_revision] + extra_args +
                   [svn_url, cache_repo])
        assert isdir(cache_repo)

    # now copy into work directory
    copytree(cache_repo, WORK_DIR)
    return WORK_DIR
Ejemplo n.º 3
0
Archivo: source.py Proyecto: Vasyka/hat
def hg_source(meta):
    ''' Download a source from Mercurial repo. '''
    hg = external.find_executable('hg')
    if not hg:
        sys.exit('Error: hg not installed')
    hg_url = meta['hg_url']
    if not isdir(HG_CACHE):
        os.makedirs(HG_CACHE)
    # Filesystem-safe cache directory name derived from the URL.
    hg_dn = hg_url.split(':')[-1].replace('/', '_')
    cache_repo = join(HG_CACHE, hg_dn)
    # Refresh an existing cached clone, or create it on first use.
    if isdir(cache_repo):
        execute([hg, 'pull'], cwd=cache_repo, check_exit_code=True)
    else:
        execute([hg, 'clone', hg_url, cache_repo], check_exit_code=True)
        assert isdir(cache_repo)

    # now clone in to work directory
    update = meta.get('hg_tag') or 'tip'
    print('checkout: %r' % update)

    execute([hg, 'clone', cache_repo, WORK_DIR],
            check_exit_code=True)
    execute([hg, 'update', '-C', update], cwd=WORK_DIR,
            check_exit_code=True)
    return WORK_DIR
Ejemplo n.º 4
0
def meta_vars(meta):
    """Build the dict of environment variables exposed to the build script.

    Includes any variables whitelisted in build/script_env, git metadata when
    the source is a verified git checkout, and the standard PKG_* /
    RECIPE_DIR values.
    """
    d = {}
    for var_name in meta.get_value("build/script_env", []):
        value = os.getenv(var_name)
        if value is None:
            warnings.warn("The environment variable '%s' is undefined." % var_name, UserWarning)
        else:
            d[var_name] = value

    git_dir = join(source.get_dir(), ".git")
    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or "utf-8")

    if external.find_executable("git") and os.path.exists(git_dir):
        git_url = meta.get_value("source/git_url")

        if os.path.exists(git_url):
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))

        _x = False
        if git_url:
            # Only trust git metadata when the checkout matches the recipe's
            # declared url/revision.
            _x = verify_git_repo(git_dir, git_url, meta.get_value("source/git_rev", "HEAD"))

        if _x or meta.get_value("source/path"):
            d.update(get_git_info(git_dir))

    d["PKG_NAME"] = meta.name()
    d["PKG_VERSION"] = meta.version()
    d["PKG_BUILDNUM"] = str(meta.build_number())
    d["PKG_BUILD_STRING"] = str(meta.build_id())
    d["RECIPE_DIR"] = meta.path
    return d
Ejemplo n.º 5
0
def handle_binstar_upload(path, args):
    """Optionally upload the built package at *path* to binstar.

    When args.binstar_upload is unset, uploading is skipped and a hint is
    printed instead; otherwise the flag's value decides.
    """
    import subprocess
    from conda_build.external import find_executable

    if args.binstar_upload is None:
        args.yes = False
        args.dry_run = False
#        upload = common.confirm_yn(
#            args,
#            message="Do you want to upload this "
#            "package to binstar", default='yes', exit_no=False)
        upload = False
    else:
        upload = args.binstar_upload

    if not upload:
        print("""\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
""" % path)
        return

    binstar = find_executable('binstar')
    if binstar is None:
        sys.exit('''
Error: cannot locate binstar (required for upload)
# Try:
# $ conda install binstar
''')
    print("Uploading to binstar")
    args = [binstar, 'upload', path]
    if config.binstar_personal:
        args += ['--personal']
    subprocess.call(args)
Ejemplo n.º 6
0
def git_source(git_url, cache_dir, target_directory, git_rev=None):
    """
    Clone the given git resource into the target directory.
    Caching the git repository in the cache directory.

    This is an adaptation of conda_build.source.git_source.

    """
    git = find_executable('git')
    # Derive a filesystem-safe cache name from the URL (plus rev, if given).
    cache_name = git_url
    if git_rev:
        cache_name = cache_name + '_{}'.format(git_rev)
    cache_name = cache_name.split(':')[-1].replace('/', '_')
    cache_repo = os.path.join(cache_dir, cache_name)
    parent_dir = os.path.dirname(cache_repo)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)
    # Refresh an existing mirror, or create it on first use.
    if os.path.isdir(cache_repo):
        subprocess.check_call([git, 'fetch'], cwd=cache_repo)
    else:
        subprocess.check_call([git, 'clone', '--mirror', git_url, cache_repo])

    if os.path.exists(target_directory):
        raise IOError('{} already exists. Remove first.'.format(target_directory))

    # Clone from the local mirror into the requested target, then pin the rev.
    subprocess.check_call([git, 'clone', '--recursive', cache_repo, target_directory])
    if git_rev:
        subprocess.check_call([git, 'checkout', git_rev], cwd=target_directory)
    return target_directory
Ejemplo n.º 7
0
def git_source(meta):
    """ Download a source from Git repo. """
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable("git")
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta["git_url"]
    # Filesystem-safe cache directory name derived from the URL.
    git_dn = git_url.split(":")[-1].replace("/", "_")
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)
    if sys.platform == "win32":
        cache_repo_arg = cache_repo_arg.replace("\\", "/")
        if os.getenv("USERNAME") == "builder":
            # Cygwin-style path expected by the builder account's git.
            cache_repo_arg = "/cygdrive/c/" + cache_repo_arg[3:]

    # update (or create) the cache repo
    if isdir(cache_repo):
        check_call([git, "fetch"], cwd=cache_repo)
    else:
        check_call([git, "clone", "--mirror", git_url, cache_repo_arg])
        assert isdir(cache_repo)

    # now clone into the work directory
    checkout = meta.get("git_tag") or meta.get("git_branch") or "master"
    print("checkout: %r" % checkout)

    check_call([git, "clone", cache_repo_arg, WORK_DIR])
    check_call([git, "checkout", checkout], cwd=WORK_DIR)

    git_info()
    return WORK_DIR
Ejemplo n.º 8
0
def git_source(meta):
    ''' Download a source from Git repo. '''
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta['git_url']
    # Filesystem-safe cache directory name derived from the URL.
    git_dn = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)
    if sys.platform == 'win32':
        cache_repo_arg = cache_repo_arg.replace('\\', '/')
        if os.getenv('USERNAME') == 'builder':
            # Cygwin-style path expected by the builder account's git.
            cache_repo_arg = '/cygdrive/c/' + cache_repo_arg[3:]

    # update (or create) the cache repo
    if isdir(cache_repo):
        check_call([git, 'fetch'], cwd=cache_repo)
    else:
        check_call([git, 'clone', '--mirror', git_url, cache_repo_arg])
        assert isdir(cache_repo)

    # now clone into the work directory
    checkout = meta.get('git_tag') or meta.get('git_branch')
    if checkout:
        print('checkout: %r' % checkout)

    check_call([git, 'clone', cache_repo_arg, WORK_DIR])
    if checkout:
        check_call([git, 'checkout', checkout], cwd=WORK_DIR)

    git_info()
    return WORK_DIR
Ejemplo n.º 9
0
def apply_patch(src_dir, path):
    """Apply patch file *path* in *src_dir*; retry with -p1 if -p0 fails."""
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    patch = external.find_executable('patch')
    if patch is None:
        sys.exit("""\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, cygwin (Windows),
""" % (os.pathsep.join(external.dir_paths)))
    patch_args = ['-p0', '-i', path]
    if sys.platform == 'win32':
        # Use a LF-normalized copy of the patch; Windows 'patch' trips on CRLF.
        patch_args[-1] = _ensure_unix_line_endings(path)
    try:
        check_call([patch] + patch_args, cwd=src_dir)
    except CalledProcessError:
        # fallback to -p1, the git default
        patch_args[0] = '-p1'
        try:
            check_call([patch] + patch_args, cwd=src_dir)
        except CalledProcessError:
            sys.exit(1)
    if sys.platform == 'win32' and os.path.exists(patch_args[-1]):
        os.remove(patch_args[-1])  # clean up .patch_unix file
def git_source(meta, recipe_dir, GIT_CACHE):
    """Fetch (or create) a mirror of the recipe's git source in GIT_CACHE.

    NOTE(review): unlike the other git_source variants, this one only updates
    the cache mirror — it does not clone into a work directory or return a
    path; confirm callers expect that.
    """
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta['git_url']
    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        os.chdir(recipe_dir)
        git_dn = abspath(expanduser(git_url))
        git_dn = "_".join(git_dn.split(os.path.sep)[1:])
    else:
        git_dn = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)

    # update (or create) the cache repo
    print('Fetch {}'.format(git_url))
    if isdir(cache_repo):
        execute([git, 'fetch'], cwd=cache_repo, check_exit_code=True)
    else:
        execute([git, 'clone', '--mirror', git_url, cache_repo_arg],
                cwd=recipe_dir, check_exit_code=True)
        assert isdir(cache_repo)
Ejemplo n.º 11
0
def git_source(meta):
    ''' Download a source from Git repo. '''
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta['git_url']
    # Filesystem-safe cache directory name derived from the URL.
    git_dn = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)
    if sys.platform == 'win32':
        cache_repo_arg = cache_repo_arg.replace('\\', '/')
        if os.getenv('USERNAME') == 'builder':
            # Cygwin-style path expected by the builder account's git.
            cache_repo_arg = '/cygdrive/c/' + cache_repo_arg[3:]

    # update (or create) the cache repo
    if isdir(cache_repo):
        check_call([git, 'fetch'], cwd=cache_repo)
    else:
        check_call([git, 'clone', '--mirror', git_url, cache_repo_arg])
        assert isdir(cache_repo)

    # now clone into the work directory
    checkout = meta.get('git_tag') or meta.get('git_branch') or 'master'
    print('checkout: %r' % checkout)

    check_call([git, 'clone', cache_repo_arg, WORK_DIR])
    check_call([git, 'checkout', checkout], cwd=WORK_DIR)

    git_info()
    return WORK_DIR
Ejemplo n.º 12
0
Archivo: post.py Proyecto: Vasyka/hat
def mk_relative_linux(f, rpaths=('lib',)):
    """Rewrite the ELF RPATH of *f* (relative to build_prefix) via patchelf.

    Relative rpath entries become $ORIGIN-based; absolute ones pass through.
    """
    path = join(config.build_prefix, f)
    rpath = ':'.join('$ORIGIN/' + utils.relative(f, d) if not
        d.startswith('/') else d for d in rpaths)
    patchelf = external.find_executable('patchelf')
    print('patchelf: file: %s\n    setting rpath to: %s' % (path, rpath))
    utils.execute([patchelf, '--force-rpath', '--set-rpath', rpath, path])
Ejemplo n.º 13
0
def mk_relative_linux(f, rpaths=('lib',)):
    """Rewrite the ELF RPATH of *f* (relative to build_prefix) via patchelf.

    Relative rpath entries become $ORIGIN-based; absolute ones pass through.
    """
    path = join(config.build_prefix, f)
    rpath = ':'.join('$ORIGIN/' + utils.relative(f, d) if not
        d.startswith('/') else d for d in rpaths)
    patchelf = external.find_executable('patchelf')
    print('patchelf: file: %s\n    setting rpath to: %s' % (path, rpath))
    call([patchelf, '--force-rpath', '--set-rpath', rpath, path])
Ejemplo n.º 14
0
def svn_source(meta):
    """Check out (or update) an SVN source and copy it into WORK_DIR."""
    def parse_bool(s):
        # Accept the usual truthy spellings from recipe metadata.
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn = external.find_executable('svn')
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta['svn_url']
    svn_revision = meta.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no')
    if not isdir(SVN_CACHE):
        os.makedirs(SVN_CACHE)
    # Filesystem-safe cache directory name derived from the URL.
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(SVN_CACHE, svn_dn)
    extra_args = ['--ignore-externals'] if svn_ignore_externals else []
    # Update an existing cached checkout, or create it on first use.
    if isdir(cache_repo):
        check_call([svn, 'up', '-r', svn_revision] + extra_args,
                   cwd=cache_repo)
    else:
        check_call([svn, 'co', '-r', svn_revision] + extra_args
                   + [svn_url, cache_repo])
        assert isdir(cache_repo)

    # now copy into work directory
    copytree(cache_repo, WORK_DIR)
    return WORK_DIR
Ejemplo n.º 15
0
def svn_source(meta):
    """ Download a source from SVN repo. """

    def parse_bool(s):
        # Accept the usual truthy spellings from recipe metadata.
        return str(s).lower().strip() in ("yes", "true", "1", "on")

    svn = external.find_executable("svn")
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta["svn_url"]
    svn_revision = meta.get("svn_rev") or "head"
    svn_ignore_externals = parse_bool(meta.get("svn_ignore_externals") or "no")
    if not isdir(SVN_CACHE):
        os.makedirs(SVN_CACHE)
    # Filesystem-safe cache directory name derived from the URL.
    svn_dn = svn_url.split(":", 1)[-1].replace("/", "_").replace(":", "_")
    cache_repo = join(SVN_CACHE, svn_dn)
    if svn_ignore_externals:
        extra_args = ["--ignore-externals"]
    else:
        extra_args = []
    # Update an existing cached checkout, or create it on first use.
    if isdir(cache_repo):
        check_call([svn, "up", "-r", svn_revision] + extra_args, cwd=cache_repo)
    else:
        check_call([svn, "co", "-r", svn_revision] + extra_args + [svn_url, cache_repo])
        assert isdir(cache_repo)

    # now copy into work directory; drop .svn administrative directories
    copytree(cache_repo, WORK_DIR, ignore=ignore_patterns(".svn"))
    return WORK_DIR
Ejemplo n.º 16
0
def git_source(meta, recipe_dir):
    """Download a source from a Git repo into WORK_DIR.

    A bare mirror is kept in GIT_CACHE and refreshed on each call; the work
    directory is cloned (recursively) from that mirror.  Supports relative
    git_url paths (resolved against the recipe dir), an optional shallow
    git_depth, and an optional git_rev checkout.
    """
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta['git_url']
    # Optional shallow-clone depth; -1 (the default) means full history.
    git_depth = int(meta.get('git_depth', -1))
    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        os.chdir(recipe_dir)
        git_dn = abspath(expanduser(git_url))
        git_dn = "_".join(git_dn.split(os.path.sep)[1:])
    else:
        git_dn = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)
    if sys.platform == 'win32':
        is_cygwin = 'cygwin' in git.lower()
        cache_repo_arg = cache_repo_arg.replace('\\', '/')
        if is_cygwin:
            # Cygwin git needs a POSIX-style path for the local repo.
            cache_repo_arg = '/cygdrive/c/' + cache_repo_arg[3:]

    # update (or create) the cache repo
    if isdir(cache_repo):
        check_call([git, 'fetch'], cwd=cache_repo)
    else:
        args = [git, 'clone', '--mirror']
        if git_depth > 0:
            # BUG FIX: subprocess arguments must be strings; passing the raw
            # int made check_call raise TypeError whenever git_depth was set.
            args += ['--depth', str(git_depth)]

        check_call(args + [git_url, cache_repo_arg], cwd=recipe_dir)
        assert isdir(cache_repo)

    # now clone into the work directory
    checkout = meta.get('git_rev')
    # if rev is not specified, and the git_url is local,
    # assume the user wants the current HEAD
    if not checkout and git_url.startswith('.'):
        process = Popen(["git", "rev-parse", "HEAD"],
                        stdout=PIPE,
                        stderr=PIPE,
                        cwd=git_url)
        output = process.communicate()[0].strip()
        checkout = output.decode('utf-8')
    if checkout:
        print('checkout: %r' % checkout)

    check_call([git, 'clone', '--recursive', cache_repo_arg, WORK_DIR])
    if checkout:
        check_call([git, 'checkout', checkout], cwd=WORK_DIR)

    git_info()
    return WORK_DIR
Ejemplo n.º 17
0
def convert_unix_path_to_win(path):
    """Translate a Unix-style path to Windows form, via cygpath when available."""
    cygpath = external.find_executable('cygpath')
    if not cygpath:
        # No cygpath on PATH: use the pure-Python translation instead.
        return unix_path_to_win(path)
    cmd = "cygpath -w {0}".format(path)
    if PY3:
        return subprocess.getoutput(cmd)
    # Python 2: getoutput lives elsewhere; shell out and tidy the result.
    return subprocess.check_output(cmd.split()).rstrip().rstrip("\\")
Ejemplo n.º 18
0
def git_source(meta, recipe_dir):
    """Download a source from a Git repo into WORK_DIR.

    A bare mirror is kept in GIT_CACHE and refreshed on each call; the work
    directory is cloned (recursively) from that mirror.  Supports relative
    git_url paths (resolved against the recipe dir), an optional shallow
    git_depth, and an optional git_rev checkout.
    """
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta['git_url']
    # Optional shallow-clone depth; -1 (the default) means full history.
    git_depth = int(meta.get('git_depth', -1))
    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        os.chdir(recipe_dir)
        git_dn = abspath(expanduser(git_url))
        git_dn = "_".join(git_dn.split(os.path.sep)[1:])
    else:
        git_dn = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)
    if sys.platform == 'win32':
        is_cygwin = 'cygwin' in git.lower()
        cache_repo_arg = cache_repo_arg.replace('\\', '/')
        if is_cygwin:
            # Cygwin git needs a POSIX-style path for the local repo.
            cache_repo_arg = '/cygdrive/c/' + cache_repo_arg[3:]

    # update (or create) the cache repo
    if isdir(cache_repo):
        check_call([git, 'fetch'], cwd=cache_repo)
    else:
        args = [git, 'clone', '--mirror']
        if git_depth > 0:
            # BUG FIX: subprocess arguments must be strings; passing the raw
            # int made check_call raise TypeError whenever git_depth was set.
            args += ['--depth', str(git_depth)]

        check_call(args + [git_url, cache_repo_arg], cwd=recipe_dir)
        assert isdir(cache_repo)

    # now clone into the work directory
    checkout = meta.get('git_rev')
    # if rev is not specified, and the git_url is local,
    # assume the user wants the current HEAD
    if not checkout and git_url.startswith('.'):
        process = Popen(["git", "rev-parse", "HEAD"],
                        stdout=PIPE, stderr=PIPE,
                        cwd=git_url)
        output = process.communicate()[0].strip()
        checkout = output.decode('utf-8')
    if checkout:
        print('checkout: %r' % checkout)

    check_call([git, 'clone', '--recursive', cache_repo_arg, WORK_DIR])
    if checkout:
        check_call([git, 'checkout', checkout], cwd=WORK_DIR)

    git_info()
    return WORK_DIR
Ejemplo n.º 19
0
def convert_unix_path_to_win(path):
    """Translate a Unix-style path to Windows form, via cygpath when available."""
    if external.find_executable('cygpath'):
        cmd = "cygpath -w {0}".format(path)
        if PY3:
            path = subprocess.getoutput(cmd)
        else:
            # Python 2: getoutput lives elsewhere; shell out and tidy result.
            path = subprocess.check_output(cmd.split()).rstrip().rstrip("\\")

    else:
        # No cygpath on PATH: use the pure-Python translation instead.
        path = unix_path_to_win(path)
    return path
Ejemplo n.º 20
0
def tar_xf(tarball, dir_path, mode='r:*'):
    """Extract *tarball* into *dir_path*.

    ``.tar.Z`` archives are first decompressed in place with the external
    ``uncompress`` tool.  On Python 2 (``not PY3``), ``.tar.xz`` archives are
    decompressed with ``unxz`` first, since old tarfile lacks LZMA support.
    """
    if tarball.lower().endswith('.tar.z'):
        uncompress = external.find_executable('uncompress')
        if not uncompress:
            sys.exit("""\
uncompress is required to unarchive .z source files.
""")
        subprocess.check_call([uncompress, '-f', tarball])
        tarball = tarball[:-2]  # drop the trailing ".z"/".Z"
    if not PY3 and tarball.endswith('.tar.xz'):
        unxz = external.find_executable('unxz')
        if not unxz:
            sys.exit("""\
unxz is required to unarchive .xz source files.
""")

        subprocess.check_call([unxz, '-f', '-k', tarball])
        tarball = tarball[:-3]  # drop the trailing ".xz"
    t = tarfile.open(tarball, mode)
    t.extractall(path=dir_path)
    t.close()
Ejemplo n.º 21
0
def handle_binstar_upload(path, args):
    """Optionally upload the built package at *path* to anaconda.org.

    Upload happens when a token/user was given explicitly or when
    args.binstar_upload (which takes its default from condarc) is truthy;
    otherwise a hint on how to upload later is printed.
    """
    import subprocess
    from conda_build.external import find_executable

    upload = False
    if args.token or args.user:
        args.yes = True
        upload = True
    # this is the default, for no explicit argument.
    # remember that args.binstar_upload takes defaults from condarc
    elif args.binstar_upload is None:
        args.yes = False
        args.dry_run = False
    # rc file has uploading explicitly turned off
    elif args.binstar_upload is False:
        print("# Automatic uploading is disabled")
    else:
        upload = True

    no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
#
# To have conda build upload to anaconda.org automatically, use
# $ conda config --set anaconda_upload yes
""" % path
    if not upload:
        print(no_upload_message)
        return

    binstar = find_executable('anaconda')
    if binstar is None:
        print(no_upload_message)
        sys.exit('''
Error: cannot locate anaconda command (required for upload)
# Try:
# $ conda install anaconda-client
''')
    print("Uploading to anaconda.org")
    cmd = [binstar, ]

    # --token must precede the 'upload' subcommand; --user follows it.
    if hasattr(args, "token") and args.token:
        cmd.extend(['--token', args.token])
    cmd.append('upload')
    if hasattr(args, "user") and args.user:
        cmd.extend(['--user', args.user])
    cmd.append(path)
    try:
        subprocess.call(cmd)
    except:
        # Best-effort: print the fallback instructions, then re-raise.
        print(no_upload_message)
        raise
Ejemplo n.º 22
0
def tar_xf(tarball, dir_path, mode='r:*'):
    """Unpack *tarball* into *dir_path*, pre-decompressing .Z/.xz archives."""
    if tarball.lower().endswith('.tar.z'):
        uncompress = external.find_executable('uncompress')
        if not uncompress:
            sys.exit("""\
uncompress is required to unarchive .z source files.
""")
        subprocess.check_call([uncompress, '-f', tarball])
        # "uncompress" writes a file without the trailing ".z"/".Z".
        tarball = tarball[:-2]
    if tarball.endswith('.tar.xz') and not PY3:
        # Python 2's tarfile has no LZMA support; decompress externally.
        unxz = external.find_executable('unxz')
        if not unxz:
            sys.exit("""\
unxz is required to unarchive .xz source files.
""")
        subprocess.check_call([unxz, '-f', '-k', tarball])
        tarball = tarball[:-3]
    archive = tarfile.open(tarball, mode)
    archive.extractall(path=dir_path)
    archive.close()
Ejemplo n.º 23
0
def handle_binstar_upload(path, args):
    """Optionally upload the built package at *path* to anaconda.org.

    Upload happens when a token/user was given explicitly or when
    args.binstar_upload (which takes its default from condarc) is truthy;
    otherwise a hint on how to upload later is printed.
    """
    import subprocess
    from conda_build.external import find_executable

    upload = False
    if args.token or args.user:
        args.yes = True
        upload = True
    # this is the default, for no explicit argument.
    # remember that args.binstar_upload takes defaults from condarc
    elif args.binstar_upload is None:
        args.yes = False
        args.dry_run = False
    # rc file has uploading explicitly turned off
    elif args.binstar_upload is False:
        print("# Automatic uploading is disabled")
    else:
        upload = True

    no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
#
# To have conda build upload to anaconda.org automatically, use
# $ conda config --set anaconda_upload yes
""" % path
    if not upload:
        print(no_upload_message)
        return

    binstar = find_executable('anaconda')
    if binstar is None:
        print(no_upload_message)
        sys.exit('''
Error: cannot locate anaconda command (required for upload)
# Try:
# $ conda install anaconda-client
''')
    print("Uploading to anaconda.org")
    cmd = [binstar, ]

    # --token must precede the 'upload' subcommand; --user follows it.
    if hasattr(args, "token") and args.token:
        cmd.extend(['--token', args.token])
    cmd.append('upload')
    if hasattr(args, "user") and args.user:
        cmd.extend(['--user', args.user])
    cmd.append(path)
    try:
        subprocess.call(cmd)
    except:
        # Best-effort: print the fallback instructions, then re-raise.
        print(no_upload_message)
        raise
Ejemplo n.º 24
0
def handle_binstar_upload(path):
    """Upload the built package at *path* to binstar, or abort if binstar is missing."""
    from conda_build.external import find_executable
    binstar = find_executable('binstar')
    if binstar is None:
        sys.exit('''
Error: cannot locate binstar (required for upload)
# Try:
# $ conda install binstar
''')
    print("Uploading to binstar")
    subprocess.call([binstar, 'upload', path])
Ejemplo n.º 25
0
def handle_binstar_upload(path):
    """Upload the built package at *path* to binstar, or abort if binstar is missing."""
    from conda_build.external import find_executable
    binstar = find_executable('binstar')
    if binstar is None:
        sys.exit('''
Error: cannot locate binstar (required for upload)
# Try:
# $ conda install binstar
''')
    print("Uploading to binstar")
    args = [binstar, 'upload', path]
    subprocess.call(args)
Ejemplo n.º 26
0
def mk_relative(f):
    """Make the installed file *f* relocatable (non-Windows only).

    Scripts under bin/ get their shebang fixed; ELF objects on Linux get a
    $ORIGIN-relative rpath via chrpath; Mach-O objects on macOS are handled
    by mk_relative_osx.
    """
    assert sys.platform != 'win32'
    if f.startswith('bin/'):
        fix_shebang(f)

    path = join(build_prefix, f)
    if sys.platform.startswith('linux') and is_obj(path):
        rpath = '$ORIGIN/' + utils.rel_lib(f)
        chrpath = external.find_executable('chrpath')
        call([chrpath, '-r', rpath, path])

    if sys.platform == 'darwin' and is_obj(path):
        mk_relative_osx(path)
Ejemplo n.º 27
0
def handle_binstar_upload(path, args):
    """Optionally upload the built package at *path* to anaconda.org.

    When args.binstar_upload is unset, uploading is skipped and a hint on
    how to upload later is printed instead.
    """
    import subprocess
    from conda_build.external import find_executable

    if args.binstar_upload is None:
        args.yes = False
        args.dry_run = False
        #        upload = common.confirm_yn(
        #            args,
        #            message="Do you want to upload this "
        #            "package to binstar", default='yes', exit_no=False)
        upload = False
    else:
        upload = args.binstar_upload

    no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
#
# To have conda build upload to anaconda.org automatically, use
# $ conda config --set anaconda_upload yes
""" % path
    if not upload:
        print(no_upload_message)
        return

    binstar = find_executable('anaconda')
    if binstar is None:
        print(no_upload_message)
        sys.exit('''
Error: cannot locate anaconda command (required for upload)
# Try:
# $ conda install anaconda-client
''')
    print("Uploading to anaconda.org")
    cmd = [
        binstar,
    ]

    # --token must precede the 'upload' subcommand; --user follows it.
    if hasattr(args, "token") and args.token:
        cmd.extend(['--token', args.token])
    cmd.append('upload')
    if hasattr(args, "user") and args.user:
        cmd.extend(['--user', args.user])
    cmd.append(path)
    try:
        subprocess.call(cmd)
    except:
        # Best-effort: print the fallback instructions, then re-raise.
        print(no_upload_message)
        raise
Ejemplo n.º 28
0
def apply_patch(src_dir, path):
    """Apply patch file *path* in *src_dir* using the external 'patch' tool."""
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    patch = external.find_executable('patch')
    if patch is None:
        sys.exit("""\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, cygwin (Windows),
""" % (os.pathsep.join(external.dir_paths)))
    check_call([patch, '-p0', '-i', path], cwd=src_dir)
Ejemplo n.º 29
0
def check_external():
    """On Linux, verify that 'chrpath' is available; abort with advice if not."""
    import os
    import conda_build.external as external

    if sys.platform.startswith('linux'):
        chrpath = external.find_executable('chrpath')
        if chrpath is None:
            sys.exit("""\
Error:
    Did not find 'chrpath' in: %s
    'chrpath' is necessary for building conda packages on Linux with
    relocatable ELF libraries.  You can install chrpath using apt-get,
    yum or conda.
""" % (os.pathsep.join(external.dir_paths)))
Ejemplo n.º 30
0
def check_external():
    """On Linux, verify that 'patchelf' is available; abort with advice if not."""
    import os
    import conda_build.external as external

    if sys.platform.startswith('linux'):
        patchelf = external.find_executable('patchelf')
        if patchelf is None:
            sys.exit("""\
Error:
    Did not find 'patchelf' in: %s
    'patchelf' is necessary for building conda packages on Linux with
    relocatable ELF libraries.  You can install patchelf using conda install
    patchelf.
""" % (os.pathsep.join(external.dir_paths)))
Ejemplo n.º 31
0
def apply_patch(src_dir, path):
    """Run the external ``patch`` tool to apply *path* within *src_dir*."""
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    patch_exe = external.find_executable('patch')
    if patch_exe is None:
        missing_msg = """\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, cygwin (Windows),
""" % (os.pathsep.join(external.dir_paths))
        sys.exit(missing_msg)
    check_call([patch_exe, '-p0', '-i', path], cwd=src_dir)
Ejemplo n.º 32
0
def check_external():
    """On Linux, verify that 'patchelf' is available; abort with advice if not."""
    import os
    import conda_build.external as external

    if not sys.platform.startswith('linux'):
        return
    # patchelf is needed to rewrite rpaths in relocatable ELF binaries.
    if external.find_executable('patchelf') is None:
        sys.exit("""\
Error:
    Did not find 'patchelf' in: %s
    'patchelf' is necessary for building conda packages on Linux with
    relocatable ELF libraries.  You can install patchelf using conda install
    patchelf.
""" % (os.pathsep.join(external.dir_paths)))
Ejemplo n.º 33
0
def handle_binstar_upload(path, args):
    """Optionally upload the built package at *path* to anaconda.org.

    When args.binstar_upload is unset, uploading is skipped and a hint on
    how to upload later is printed instead.
    """
    import subprocess
    from conda_build.external import find_executable

    if args.binstar_upload is None:
        args.yes = False
        args.dry_run = False
#        upload = common.confirm_yn(
#            args,
#            message="Do you want to upload this "
#            "package to binstar", default='yes', exit_no=False)
        upload = False
    else:
        upload = args.binstar_upload

    no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
#
# To have conda build upload to anaconda.org automatically, use
# $ conda config --set anaconda_upload yes
""" % path
    if not upload:
        print(no_upload_message)
        return

    binstar = find_executable('anaconda')
    if binstar is None:
        print(no_upload_message)
        sys.exit('''
Error: cannot locate anaconda command (required for upload)
# Try:
# $ conda install anaconda-client
''')
    print("Uploading to anaconda.org")
    cmd = [binstar, ]

    # --token must precede the 'upload' subcommand; --user follows it.
    if hasattr(args, "token") and args.token:
        cmd.extend(['--token', args.token])
    cmd.append('upload')
    if hasattr(args, "user") and args.user:
        cmd.extend(['--user', args.user])
    cmd.append(path)
    try:
        subprocess.call(cmd)
    except:
        # Best-effort: print the fallback instructions, then re-raise.
        print(no_upload_message)
        raise
Ejemplo n.º 34
0
def mk_relative(f, binary_relocation=True):
    """Make the installed file *f* relocatable (non-Windows only).

    No-op when binary_relocation is False.  ELF objects on Linux get a
    $ORIGIN-relative rpath via patchelf; Mach-O objects on macOS are handled
    by mk_relative_osx.
    """
    assert sys.platform != 'win32'

    if not binary_relocation:
        return

    path = join(config.build_prefix, f)
    if sys.platform.startswith('linux') and is_obj(path):
        rpath = '$ORIGIN/' + utils.rel_lib(f)
        patchelf = external.find_executable('patchelf')
        print('patchelf: file: %s\n    setting rpath to: %s' % (path, rpath))
        call([patchelf, '--set-rpath', rpath, path])

    if sys.platform == 'darwin' and is_obj(path):
        mk_relative_osx(path)
Ejemplo n.º 35
0
def handle_binstar_upload(path):
    """Upload the built package at *path* via the 'anaconda' client, or abort."""
    from conda_build.external import find_executable

    binstar = find_executable("anaconda")
    if binstar is None:
        sys.exit(
            """
Error: cannot locate anaconda command (required for upload)
# Try:
# $ conda install anaconda-client
"""
        )
    print("Uploading to anaconda.org")
    args = [binstar, "upload", path]
    subprocess.call(args)
Ejemplo n.º 36
0
def mk_relative(m, f):
    """Make object file *f* relocatable (non-Windows only).

    On Linux the rpath is built from the recipe's build/rpaths entries
    (default ['lib']) as $ORIGIN-relative paths and set with patchelf;
    macOS objects are handled by mk_relative_osx.
    """
    assert sys.platform != 'win32'
    path = join(config.build_prefix, f)
    if not is_obj(path):
        return

    if sys.platform.startswith('linux'):
        rpath = ':'.join('$ORIGIN/' + utils.relative(f, d) for d in
                         m.get_value('build/rpaths', ['lib']))
        patchelf = external.find_executable('patchelf')
        print('patchelf: file: %s\n    setting rpath to: %s' % (path, rpath))
        call([patchelf, '--set-rpath', rpath, path])

    elif sys.platform == 'darwin':
        mk_relative_osx(path)
Ejemplo n.º 37
0
def mk_relative(f, binary_relocation=True):
    """Adjust rpaths on *f* inside the build prefix; no-op unless relocating."""
    assert sys.platform != 'win32'

    if not binary_relocation:
        return

    target = join(config.build_prefix, f)
    is_binary = is_obj(target)
    if sys.platform.startswith('linux') and is_binary:
        rpath = '$ORIGIN/' + utils.rel_lib(f)
        tool = external.find_executable('patchelf')
        print('patchelf: file: %s\n    setting rpath to: %s' % (target, rpath))
        call([tool, '--set-rpath', rpath, target])

    if sys.platform == 'darwin' and is_binary:
        mk_relative_osx(target)
Ejemplo n.º 38
0
def handle_binstar_upload(path, args):
    """Optionally upload the package at *path* to anaconda.org.

    :param path: path of the built package tarball
    :param args: parsed CLI namespace; ``args.binstar_upload`` of None means
                 "not configured", so the upload is skipped with a hint.
    """
    import subprocess
    from conda_build.external import find_executable

    if args.binstar_upload is None:
        args.yes = False
        args.dry_run = False
        #        upload = common.confirm_yn(
        #            args,
        #            message="Do you want to upload this "
        #            "package to binstar", default='yes', exit_no=False)
        upload = False
    else:
        upload = args.binstar_upload

    no_upload_message = (
        """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
#
# To have conda build upload to anaconda.org automatically, use
# $ conda config --set anaconda_upload yes
"""
        % path
    )
    if not upload:
        print(no_upload_message)
        return

    binstar = find_executable("anaconda")
    if binstar is None:
        print(no_upload_message)
        sys.exit(
            """
Error: cannot locate anaconda command (required for upload)
# Try:
# $ conda install anaconda-client
"""
        )
    print("Uploading to anaconda.org")
    # BUGFIX: keep the command in its own variable instead of clobbering the
    # *args* CLI-namespace parameter (the original rebound `args` here).
    cmd = [binstar, "upload", path]
    try:
        subprocess.call(cmd)
    except Exception:
        # Narrowed from a bare `except:`; still surfaces the hint then re-raises.
        print(no_upload_message)
        raise
Ejemplo n.º 39
0
def apply_patch(src_dir, path):
    """Apply the patch file *path* inside *src_dir* with the external ``patch`` tool.

    :raises SystemExit: if the patch file or the ``patch`` executable is missing.
    """
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    patch = external.find_executable('patch')
    if patch is None:
        sys.exit("""\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, cygwin (Windows),
""" % (os.pathsep.join(external.dir_paths)))
    patch_args = ['-p0', '-i', path]
    if sys.platform == 'win32':
        # Native patch chokes on CRLF; hand it a LF-converted temporary copy.
        patch_args[-1] = _ensure_unix_line_endings(path)
    check_call([patch, ] + patch_args, cwd=src_dir)
    # BUGFIX: remove the temporary LF-converted copy on Windows; the
    # original left the *_unix file behind after a successful patch.
    if sys.platform == 'win32' and os.path.exists(patch_args[-1]):
        os.remove(patch_args[-1])
Ejemplo n.º 40
0
def mk_relative(f, binary_relocation=True):
    """Fix shebangs for bin/ scripts and rewrite rpaths on object files."""
    assert sys.platform != "win32"
    if f.startswith("bin/"):
        fix_shebang(f)

    if not binary_relocation:
        return

    located = join(build_prefix, f)
    if sys.platform.startswith("linux") and is_obj(located):
        origin_rpath = "$ORIGIN/" + utils.rel_lib(f)
        elf_patcher = external.find_executable("patchelf")
        print("patchelf: file: %s\n    setting rpath to: %s" % (located, origin_rpath))
        call([elf_patcher, "--set-rpath", origin_rpath, located])

    if sys.platform == "darwin" and is_obj(located):
        mk_relative_osx(located)
Ejemplo n.º 41
0
def svn_source(meta, verbose=False):
    ''' Download a source from SVN repo. '''

    def parse_bool(s):
        # Accept the usual truthy spellings used in meta.yaml values.
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn = external.find_executable('svn')
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta['svn_url']
    svn_revision = meta.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no')
    if not isdir(SVN_CACHE):
        os.makedirs(SVN_CACHE)
    # Derive a flat cache-directory name from the URL.
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(SVN_CACHE, svn_dn)
    extra_args = ['--ignore-externals'] if svn_ignore_externals else []

    FNULL = None
    if verbose:
        stdout = stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = stderr = FNULL
    try:
        if isdir(cache_repo):
            check_call([svn, 'up', '-r', svn_revision] + extra_args,
                       cwd=cache_repo,
                       stdout=stdout,
                       stderr=stderr)
        else:
            check_call([svn, 'co', '-r', svn_revision] + extra_args +
                       [svn_url, cache_repo],
                       stdout=stdout,
                       stderr=stderr)
            assert isdir(cache_repo)
    finally:
        # BUGFIX: previously the devnull handle leaked whenever an svn
        # command raised; always close it.
        if FNULL is not None:
            FNULL.close()

    # now copy into work directory
    copytree(cache_repo, WORK_DIR, symlinks=True)
    return WORK_DIR
Ejemplo n.º 42
0
def apply_patch(src_dir, path):
    """Run the external ``patch`` tool on *path* from within *src_dir*."""
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    patch_exe = external.find_executable('patch')
    if patch_exe is None:
        sys.exit("""\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, cygwin (Windows),
""" % (os.pathsep.join(external.dir_paths)))
    # without --binary flag CR will be stripped and patch will fail
    extra = ['--binary'] if sys.platform == 'win32' else []
    check_call([patch_exe, '-p0'] + extra + ['-i', path], cwd=src_dir)
Ejemplo n.º 43
0
def apply_patch(src_dir, path):
    """Apply *path* as a ``-p0`` patch inside *src_dir*."""
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    tool = external.find_executable('patch')
    if tool is None:
        sys.exit("""\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, cygwin (Windows),
""" % (os.pathsep.join(external.dir_paths)))
    if sys.platform != 'win32':
        check_call([tool, '-p0', '-i', path], cwd=src_dir)
    else:
        # without --binary flag CR will be stripped and patch will fail
        check_call([tool, '-p0', '--binary', '-i', path], cwd=src_dir)
Ejemplo n.º 44
0
def hg_source(meta, verbose=False):
    ''' Download a source from Mercurial repo. '''
    hg = external.find_executable('hg')
    if not hg:
        sys.exit('Error: hg not installed')
    hg_url = meta['hg_url']
    if not isdir(HG_CACHE):
        os.makedirs(HG_CACHE)
    # Flatten the URL into a single cache-directory name.
    hg_dn = hg_url.split(':')[-1].replace('/', '_')
    cache_repo = join(HG_CACHE, hg_dn)

    FNULL = None
    if verbose:
        stdout = stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = stderr = FNULL
    try:
        if isdir(cache_repo):
            check_call([hg, 'pull'], cwd=cache_repo, stdout=stdout, stderr=stderr)
        else:
            check_call([hg, 'clone', hg_url, cache_repo],
                       stdout=stdout,
                       stderr=stderr)
            assert isdir(cache_repo)

        # now clone in to work directory
        update = meta.get('hg_tag') or 'tip'
        if verbose:
            print('checkout: %r' % update)

        check_call([hg, 'clone', cache_repo, WORK_DIR],
                   stdout=stdout,
                   stderr=stderr)
        check_call([hg, 'update', '-C', update],
                   cwd=WORK_DIR,
                   stdout=stdout,
                   stderr=stderr)
    finally:
        # BUGFIX: the devnull handle used to leak when any hg command
        # failed; close it on every exit path.
        if FNULL is not None:
            FNULL.close()

    return WORK_DIR
Ejemplo n.º 45
0
def svn_source(meta, verbose=False):
    ''' Download a source from SVN repo. '''

    def parse_bool(s):
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn = external.find_executable('svn')
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta['svn_url']
    svn_revision = meta.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no')
    if not isdir(SVN_CACHE):
        os.makedirs(SVN_CACHE)
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(SVN_CACHE, svn_dn)
    extra_args = ['--ignore-externals'] if svn_ignore_externals else []

    FNULL = None
    if verbose:
        stdout = stderr = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = stderr = FNULL
    try:
        if isdir(cache_repo):
            # Cache hit: just update it to the requested revision.
            check_call([svn, 'up', '-r', svn_revision] + extra_args, cwd=cache_repo,
                       stdout=stdout, stderr=stderr)
        else:
            check_call([svn, 'co', '-r', svn_revision] + extra_args + [svn_url, cache_repo],
                       stdout=stdout, stderr=stderr)
            assert isdir(cache_repo)
    finally:
        # BUGFIX: close the devnull handle even when an svn command raises;
        # the original only closed it on the success path.
        if FNULL is not None:
            FNULL.close()

    # now copy into work directory
    copytree(cache_repo, WORK_DIR, symlinks=True)
    return WORK_DIR
Ejemplo n.º 46
0
def handle_binstar_upload(path, args):
    """Upload the package at *path* to binstar if the CLI args request it.

    :param path: path of the built package tarball
    :param args: parsed CLI namespace; ``args.binstar_upload`` of None means
                 "not configured", so the upload is skipped with a hint.
    """
    import subprocess
    from conda_build.external import find_executable

    if args.binstar_upload is None:
        args.yes = False
        args.dry_run = False
#        upload = common.confirm_yn(
#            args,
#            message="Do you want to upload this "
#            "package to binstar", default='yes', exit_no=False)
        upload = False
    else:
        upload = args.binstar_upload

    no_upload_message = """\
# If you want to upload this package to binstar.org later, type:
#
# $ binstar upload %s
#
# To have conda build upload to binstar automatically, use
# $ conda config --set binstar_upload yes
""" % path
    if not upload:
        print(no_upload_message)
        return

    binstar = find_executable('binstar')
    if binstar is None:
        print(no_upload_message)
        sys.exit('''
Error: cannot locate binstar (required for upload)
# Try:
# $ conda install binstar
''')
    print("Uploading to binstar")
    # BUGFIX: keep the command in its own variable instead of rebinding the
    # *args* CLI-namespace parameter.
    cmd = [binstar, 'upload', path]
    try:
        subprocess.call(cmd)
    except Exception:
        # Narrowed from a bare `except:`; print the hint then re-raise.
        print(no_upload_message)
        raise
Ejemplo n.º 47
0
def apply_patch(src_dir, path):
    """Apply *path* inside *src_dir*, guessing the correct ``-p`` strip level."""
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    patch_tool = external.find_executable('patch')
    if patch_tool is None:
        sys.exit("""\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, m2-patch (Windows),
""" % (os.pathsep.join(external.dir_paths)))
    touched = _source_files_from_patch_file(path)
    strip = _guess_patch_strip_level(touched, src_dir)
    patch_input = path
    if sys.platform == 'win32':
        # Native patch needs LF endings; use a converted temporary copy.
        patch_input = _ensure_unix_line_endings(path)
    check_call([patch_tool, '-p%d' % strip, '-i', patch_input], cwd=src_dir)
    if sys.platform == 'win32' and os.path.exists(patch_input):
        os.remove(patch_input)  # clean up .patch_unix file
Ejemplo n.º 48
0
def meta_vars(meta):
    """Collect the environment variables to expose during a build of *meta*."""
    env = {}
    # Pass through any variables the recipe explicitly requests.
    for var_name in meta.get_value('build/script_env', []):
        value = os.getenv(var_name)
        if value is None:
            warnings.warn(
                "The environment variable '%s' is undefined." % var_name,
                UserWarning
            )
        else:
            env[var_name] = value

    git_dir = join(source.get_dir(), '.git')
    if not isinstance(git_dir, str):
        # On Windows, subprocess env can't handle unicode.
        git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8')

    if external.find_executable('git') and os.path.exists(git_dir):
        git_url = meta.get_value('source/git_url')

        if os.path.exists(git_url):
            # If git_url is a relative path instead of a url, convert it to an abspath
            git_url = normpath(join(meta.path, git_url))

        repo_verified = False
        if git_url:
            repo_verified = verify_git_repo(git_dir,
                                            git_url,
                                            meta.get_value('source/git_rev', 'HEAD'))

        if repo_verified or meta.get_value('source/path'):
            env.update(get_git_info(git_dir))

    # Standard per-package variables.
    env['PKG_NAME'] = meta.name()
    env['PKG_VERSION'] = meta.version()
    env['PKG_BUILDNUM'] = str(meta.build_number())
    env['PKG_BUILD_STRING'] = str(meta.build_id())
    env['RECIPE_DIR'] = meta.path
    return env
Ejemplo n.º 49
0
def hg_source(meta):
    '''Download a source from Mercurial repo.'''
    hg_exe = external.find_executable('hg')
    if not hg_exe:
        sys.exit('Error: hg not installed')
    repo_url = meta['hg_url']
    if not isdir(HG_CACHE):
        os.makedirs(HG_CACHE)
    cache_name = repo_url.split(':')[-1].replace('/', '_')
    cached = join(HG_CACHE, cache_name)
    if isdir(cached):
        # Refresh the existing cache mirror.
        check_call([hg_exe, 'pull'], cwd=cached)
    else:
        check_call([hg_exe, 'clone', repo_url, cached])
        assert isdir(cached)

    # now clone in to work directory
    revision = meta.get('hg_tag') or 'tip'
    print('checkout: %r' % revision)

    check_call([hg_exe, 'clone', cached, WORK_DIR])
    check_call([hg_exe, 'update', '-C', revision], cwd=WORK_DIR)
    return WORK_DIR
Ejemplo n.º 50
0
def build(m,
          post=None,
          include_recipe=True,
          keep_old_work=False,
          need_source_download=True,
          need_reparse_in_env=False,
          verbose=True,
          dirty=False,
          activate=True,
          debug=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    :type need_reparse_in_env: bool: re-render the recipe inside the build env
    :type verbose: bool: forwarded to source download
    :type dirty: bool: forwarded to source download and windows.build
    :type activate: bool: prepend "source activate <prefix>" to the Unix
    build script (and ensure the recipe's VCS is available in the env)
    :type debug: bool: forwarded to create_env
    '''

    # Packages with binary prefix replacement must be built under a long
    # prefix so they stay installable into shorter ones.
    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):

        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [
                name for name in os.listdir(source.WORK_DIR)
                if os.path.isdir(os.path.join(source.WORK_DIR, name))
            ]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s" %
                      (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub),
                                old_WORK_DIR)

        # --- pre-build phase: clean env, resolve deps, fetch source, run script ---
        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if not need_source_download or not need_reparse_in_env:
                print(
                    "    (actual version deferred until further download or env creation)"
                )
            # Remove both candidate build prefixes (short and long).
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            specs = [ms.spec for ms in m.ms_depends('build')]
            if activate:
                # If we activate the build envrionment, we need to be sure that we
                #    have the appropriate VCS available in the environment.  People
                #    are not used to explicitly listing it in recipes, though.
                #    We add it for them here, but warn them about it.
                vcs_source = m.uses_vcs_in_build()
                if vcs_source and vcs_source not in specs:
                    vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
                    has_vcs_available = os.path.isfile(
                        external.find_executable(vcs_executable) or "")
                    if not has_vcs_available:
                        # Mercurial cannot be added as a dep under Python 3.
                        if (vcs_source != "mercurial" or not any(
                                spec.startswith('python') and "3." in spec
                                for spec in specs)):
                            specs.append(vcs_source)

                            log.warn(
                                "Your recipe depends on {} at build time (for templates), "
                                "but you have not listed it as a build dependency.  Doing "
                                "so for this build.")
                        else:
                            raise ValueError(
                                "Your recipe uses mercurial in build, but mercurial"
                                " does not yet support Python 3.  Please handle all of "
                                "your mercurial actions outside of your build script."
                            )
            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix, specs, debug=debug)

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
                # dependencies.
                if not activate:
                    # Temporarily put the build prefix's bin dir on PATH so
                    # the fetcher tools can be found; restored below.
                    _old_path = os.environ['PATH']
                    os.environ['PATH'] = prepend_bin_path(
                        {'PATH': _old_path}, config.build_prefix)['PATH']
                try:
                    m, need_source_download, need_reparse_in_env = parse_or_try_download(
                        m,
                        no_download_source=False,
                        force_download=True,
                        verbose=verbose,
                        dirty=dirty)
                    assert not need_source_download, "Source download failed.  Please investigate."
                finally:
                    if not activate:
                        os.environ['PATH'] = _old_path
                print("BUILD START:", m.dist())

            if need_reparse_in_env:
                reparse(m)
                print("BUILD START:", m.dist())

            # If the package being built is already installed in the build
            # env as a dependency, remove it to avoid self-contamination.
            if m.name() in [
                    i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
            ]:
                print("%s is installed as a build dependency. Removing." %
                      m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()],
                                              index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            src_dir = source.get_dir()
            if isdir(source.WORK_DIR):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(source.WORK_DIR)

            # Snapshot the prefix contents before the build script runs so
            # the post phase can compute what the build added.
            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn(
                        "Glob %s from always_include_files does not match any files"
                        % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(source.WORK_DIR):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        # Inline build/script takes precedence over bld.bat.
                        build_file = join(source.get_dir(), 'bld.bat')
                        with open(join(source.get_dir(), 'bld.bat'),
                                  'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m,
                                  build_file,
                                  dirty=dirty,
                                  activate=activate)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        env = environ.get_dict(m, dirty=dirty)
                        work_file = join(source.get_dir(), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if activate:
                            # Prepend an activation line to the script copy.
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write(
                                    "source activate {build_prefix}\n".format(
                                        build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                shutil.copy(build_file, work_file)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]

                            _check_call(cmd, env=env, cwd=src_dir)

        # --- post phase: collect new files, post-process, and package them ---
        if post in [True, None]:
            if post:
                # post-only run: recover the pre-build snapshot from disk.
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(
                             m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f)
                   for f in files2 - files1):
                sys.exit(
                    indent(
                        """Error: Untracked file(s) %s found in conda-meta directory.
    This error usually comes from using conda in the build script.  Avoid doing this, as it
    can lead to packages that include their dependencies.""" %
                        (tuple(f for f in files2 - files1 if config.meta_dir in
                               join(config.build_prefix, f)), )))
            post_build(m, sorted(files2 - files1))
            create_info_files(m,
                              sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        # Restore anything we stashed away for --keep-old-work, without
        # overwriting files the new build produced.
        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s" %
                  (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print(
                        "Not restoring old source directory %s over new build's version"
                        % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub),
                                source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
Ejemplo n.º 51
0
def git_source(meta, recipe_dir, verbose=False):
    ''' Download a source from Git repo. '''
    if verbose:
        stdout = None
    else:
        FNULL = open(os.devnull, 'w')
        stdout = FNULL

    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta['git_url']
    git_depth = int(meta.get('git_depth', -1))
    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        os.chdir(recipe_dir)
        git_dn = abspath(expanduser(git_url))
        git_dn = "_".join(git_dn.split(os.path.sep)[1:])
    else:
        git_dn = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)
    if sys.platform == 'win32':
        is_cygwin = 'cygwin' in git.lower()
        cache_repo_arg = cache_repo_arg.replace('\\', '/')
        if is_cygwin:
            cache_repo_arg = '/cygdrive/c/' + cache_repo_arg[3:]

    # update (or create) the cache repo
    if isdir(cache_repo):
        if meta.get('git_rev', 'HEAD') != 'HEAD':
            check_call([git, 'fetch'], cwd=cache_repo, stdout=stdout)
        else:
            # Unlike 'git clone', fetch doesn't automatically update the cache's HEAD,
            # So here we explicitly store the remote HEAD in the cache's local refs/heads,
            # and then explicitly set the cache's HEAD.
            # This is important when the git repo is a local path like "git_url: ../",
            # but the user is working with a branch other than 'master' without
            # explicitly providing git_rev.
            check_call(
                [git, 'fetch', 'origin', '+HEAD:_conda_cache_origin_head'],
                cwd=cache_repo,
                stdout=stdout)
            check_call([
                git, 'symbolic-ref', 'HEAD',
                'refs/heads/_conda_cache_origin_head'
            ],
                       cwd=cache_repo,
                       stdout=stdout)
    else:
        args = [git, 'clone', '--mirror']
        if git_depth > 0:
            args += ['--depth', str(git_depth)]

        check_call(args + [git_url, cache_repo_arg], stdout=stdout)
        assert isdir(cache_repo)

    # now clone into the work directory
    checkout = meta.get('git_rev')
    # if rev is not specified, and the git_url is local,
    # assume the user wants the current HEAD
    if not checkout and git_url.startswith('.'):
        # BUGFIX: use the resolved `git` executable rather than the bare
        # string "git", consistent with every other invocation here.
        process = Popen([git, "rev-parse", "HEAD"], stdout=PIPE, cwd=git_url)
        output = process.communicate()[0].strip()
        checkout = output.decode('utf-8')
    if checkout and verbose:
        print('checkout: %r' % checkout)

    check_call([git, 'clone', cache_repo_arg, WORK_DIR], stdout=stdout)
    if checkout:
        check_call([git, 'checkout', checkout], cwd=WORK_DIR, stdout=stdout)

    # Submodules must be updated after checkout.
    check_call([git, 'submodule', 'update', '--init', '--recursive'],
               cwd=WORK_DIR,
               stdout=stdout)

    git_info(verbose=verbose)

    if not verbose:
        FNULL.close()

    return WORK_DIR