Ejemplo n.º 1
0
def get_git_build_info(src_dir):
    """Collect git metadata for the checkout under *src_dir*.

    Returns a dict that may contain GIT_DESCRIBE_TAG/NUMBER/HASH,
    GIT_FULL_HASH, and GIT_BUILD_STR ("<number>_<hash>"); empty when
    *src_dir* has no ``.git`` directory.
    """
    info = {}
    git_dir = join(src_dir, '.git')
    if not os.path.exists(git_dir):
        # Not a git checkout: nothing to report.
        return info

    env = os.environ.copy()
    env['GIT_DIR'] = git_dir
    # Normalize env to plain str keys/values before running subprocesses.
    env = {str(k): str(v) for k, v in env.items()}

    # Gather information from `git describe`.
    keys = ['GIT_DESCRIBE_TAG', 'GIT_DESCRIBE_NUMBER', 'GIT_DESCRIBE_HASH']
    output, _ = utils.execute(["git", "describe", "--tags", "--long", "HEAD"],
                              env=env)
    described = output.strip().rsplit('-', 2)
    if len(described) == 3:
        info.update(zip(keys, described))

    # Full hash of the current HEAD.
    output, _ = utils.execute(["git", "rev-parse", "HEAD"], env=env)
    info['GIT_FULL_HASH'] = output.strip()

    # Build string combines commit count and abbreviated hash.
    if 'GIT_DESCRIBE_NUMBER' in info and 'GIT_DESCRIBE_HASH' in info:
        info['GIT_BUILD_STR'] = '{}_{}'.format(info['GIT_DESCRIBE_NUMBER'],
                                               info['GIT_DESCRIBE_HASH'])

    return info
def svn_source(meta, SVN_CACHE):
    ''' Download a source from SVN repo.

    Checks out (or updates) meta['svn_url'] at revision meta['svn_rev']
    (default 'head') into a cache directory under SVN_CACHE.
    Exits the process when svn is not installed.
    '''
    def parse_bool(s):
        # Accept common truthy spellings for the svn_ignore_externals flag.
        return str(s).lower().strip() in ('yes', 'true', '1', 'on')

    svn = external.find_executable('svn')
    if not svn:
        sys.exit("Error: svn is not installed")
    svn_url = meta['svn_url']
    svn_revision = meta.get('svn_rev') or 'head'
    svn_ignore_externals = parse_bool(meta.get('svn_ignore_externals') or 'no')
    if not isdir(SVN_CACHE):
        os.makedirs(SVN_CACHE)
    # Derive a filesystem-safe cache directory name from the URL.
    svn_dn = svn_url.split(':', 1)[-1].replace('/', '_').replace(':', '_')
    cache_repo = join(SVN_CACHE, svn_dn)
    if svn_ignore_externals:
        extra_args = ['--ignore-externals']
    else:
        extra_args = []
    if isdir(cache_repo):
        # Cache already exists: bring it to the requested revision.
        execute([svn, 'up', '-r', svn_revision] + extra_args,
                cwd=cache_repo, check_exit_code=True)
    else:
        # First use: check the repo out into the cache.
        execute([svn, 'co', '-r', svn_revision] + extra_args +
                [svn_url, cache_repo], check_exit_code=True)
        assert isdir(cache_repo)
def git_source(meta, recipe_dir, GIT_CACHE):
    """Mirror meta['git_url'] into a cache repository under GIT_CACHE.

    Relative URLs are resolved against *recipe_dir* (the process chdirs
    there). Creates the mirror on first use; fetches on later calls.
    """
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")

    git_url = meta['git_url']
    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        os.chdir(recipe_dir)
        resolved = abspath(expanduser(git_url))
        cache_name = "_".join(resolved.split(os.path.sep)[1:])
    else:
        cache_name = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, cache_name)

    # update (or create) the cache repo
    print('Fetch {}'.format(git_url))
    if not isdir(cache_repo):
        execute([git, 'clone', '--mirror', git_url, cache_repo_arg],
                cwd=recipe_dir, check_exit_code=True)
        assert isdir(cache_repo)
    else:
        execute([git, 'fetch'], cwd=cache_repo, check_exit_code=True)
Ejemplo n.º 4
0
def get_all_dependencies(package, version):
    """Create a throwaway conda env, pip-install *package*==*version* into
    it, and return that distribution's runtime requirements.

    Returns a list of conda-style spec strings like ``['foo 1.2', 'bar']``.
    Raises RuntimeError when the pip install fails.
    """
    import ast
    import conda.config
    prefix = os.path.join(conda.config.default_prefix, 'envs', '_pipbuild_')
    cmd1 = "conda create -n _pipbuild_ --yes python pip"
    print(cmd1)
    utils.execute(cmd1.split())
    cmd2 = "%s/bin/pip install %s==%s" % (prefix, package, version)
    print(cmd2)
    try:
        execute(cmd2.split(), check_exit_code=True)
    except subprocess.CalledProcessError:
        raise RuntimeError("Could not pip install %s==%s" % (package, version))

    # Probe the freshly-installed distribution for its requirements by
    # running a small script under the env's own python.
    cmd3args = ['%s/bin/python' % prefix, '__tmpfile__.py']
    with open('__tmpfile__.py', 'w') as fid:
        fid.write("import pkg_resources;\n")
        fid.write("reqs = pkg_resources.get_distribution('%s').requires();\n" %
                  package)
        fid.write("print([(req.key, req.specs) for req in reqs])\n")
    print("Getting dependencies...")
    try:
        output, _ = utils.execute(cmd3args, check_exit_code=True)
    finally:
        # Remove the probe script even when the subprocess fails.
        os.unlink('__tmpfile__.py')
    # literal_eval instead of eval: the probe prints a plain list literal,
    # and eval on subprocess output would execute arbitrary expressions.
    deps = ast.literal_eval(output)
    depends = []
    for dep in deps:
        # dep is (name, specs). NOTE(review): pkg_resources specs are
        # (op, version) tuples, so dep[1][0] == '==' looks like it never
        # matches — confirm the intended shape against the probe output.
        if len(dep[1]) == 2 and dep[1][0] == '==':
            depends.append(dep[0] + ' ' + dep[1][1])
        else:
            depends.append(dep[0])
    cmd4 = "conda remove -n _pipbuild_ --yes --all"
    utils.execute(cmd4.split())
    return depends
Ejemplo n.º 5
0
Archivo: post.py Proyecto: Vasyka/hat
def mk_relative_linux(f, rpaths=('lib',)):
    """Rewrite the ELF RPATH of build_prefix/*f* with patchelf.

    Entries in *rpaths* that do not start with '/' are made
    $ORIGIN-relative; absolute entries pass through unchanged.
    """
    path = join(config.build_prefix, f)
    entries = []
    for d in rpaths:
        if d.startswith('/'):
            entries.append(d)
        else:
            entries.append('$ORIGIN/' + utils.relative(f, d))
    rpath = ':'.join(entries)
    patchelf = external.find_executable('patchelf')
    print('patchelf: file: %s\n    setting rpath to: %s' % (path, rpath))
    utils.execute([patchelf, '--force-rpath', '--set-rpath', rpath, path])
Ejemplo n.º 6
0
def get_git_build_info(src_dir):
    """Return a dict of git metadata for the checkout under *src_dir*.

    Keys: GIT_DESCRIBE_TAG/NUMBER/HASH (when `git describe` yields a
    tag-number-hash triple), GIT_FULL_HASH, and GIT_BUILD_STR
    ("<number>_<hash>"). Empty dict when src_dir has no .git directory.
    """
    env = os.environ.copy()
    d = {}
    git_dir = join(src_dir, '.git')
    if os.path.exists(git_dir):
        env['GIT_DIR'] = git_dir
    else:
        # Not a git checkout: nothing to report.
        return d

    # grab information from describe
    key_name = lambda a: "GIT_DESCRIBE_{}".format(a)
    keys = [key_name("TAG"), key_name("NUMBER"), key_name("HASH")]
    # Normalize env to plain str keys/values for the subprocess.
    env = {str(key): str(value) for key, value in env.items()}
    output, _ = utils.execute(["git", "describe", "--tags", "--long", "HEAD"],
                              env=env)
    # describe output: "<tag>-<commits-since-tag>-g<short-hash>"
    parts = output.strip().rsplit('-', 2)
    parts_length = len(parts)
    if parts_length == 3:
        d.update(dict(zip(keys, parts)))
    # get the _full_ hash of the current HEAD
    output, _ = utils.execute(["git", "rev-parse", "HEAD"], env=env)

    d['GIT_FULL_HASH'] = output.strip()
    # set up the build string
    if key_name('NUMBER') in d and key_name('HASH') in d:
        d['GIT_BUILD_STR'] = '{}_{}'.format(d[key_name('NUMBER')],
                                            d[key_name('HASH')])

    return d
Ejemplo n.º 7
0
def build_package(package, version=None, noarch_python=False):
    """Recursively build a conda package (and its dependencies) from pip.

    Returns the build's exit code (0 on success). The generated recipe
    directory is removed afterwards regardless of outcome.
    """
    if ' ' in package:
        # "name version" form: split into the two parts.
        package, version = package.split(' ')
    try:
        directory = build_recipe(package, version=version)
        dependencies = convert_recipe(directory, package,
                                      noarch_python=noarch_python)
    except RuntimeError:
        # Fall back to a hand-made recipe when skeleton/convert fails.
        directory, dependencies = make_recipe(package, version,
                                              noarch_python=noarch_python)

    return_code = 0

    try:
        print("package = %s" % package)
        print("   dependencies = %s" % dependencies)
        # Dependencies will be either package_name or
        #  package_name version_number
        # Only == dependency specs get version numbers
        # All else are just handled without a version spec
        for depend in dependencies:
            build_package(depend)
        args = build_template.format(directory).split()
        print("Building conda package for {0}".format(package.lower()))

        try:
            utils.execute(args, check_exit_code=True)
        except subprocess.CalledProcessError as exc:
            # NOTE(review): stock subprocess.CalledProcessError exposes
            # `returncode`, not `return_code` — this only works if
            # utils.execute raises a variant with that attribute; confirm.
            return_code = exc.return_code
        else:
            m = MetaData(directory)
            handle_binstar_upload(build.bldpkg_path(m))
    finally:
        rm_rf(directory)
    return return_code
Ejemplo n.º 8
0
def get_all_dependencies(package, version):
    """Pip-install *package*==*version* into a scratch conda env and return
    the distribution's requirements as conda-style spec strings.

    A version is attached only for exact '==' pins; all other requirements
    are returned unversioned. Raises RuntimeError when pip install fails.
    """
    import ast
    import conda.config
    prefix = os.path.join(conda.config.default_prefix, 'envs', '_pipbuild_')
    cmd1 = "conda create -n _pipbuild_ --yes python pip"
    print(cmd1)
    utils.execute(cmd1.split())
    cmd2 = "%s/bin/pip install %s==%s" % (prefix, package, version)
    print(cmd2)
    try:
        execute(cmd2.split(), check_exit_code=True)
    except subprocess.CalledProcessError:
        raise RuntimeError("Could not pip install %s==%s" % (package, version))

    # Run a small probe script under the env's own python to ask
    # pkg_resources for the installed distribution's requirements.
    cmd3args = ['%s/bin/python' % prefix, '__tmpfile__.py']
    with open('__tmpfile__.py', 'w') as fid:
        fid.write("import pkg_resources;\n")
        fid.write("reqs = pkg_resources.get_distribution('%s').requires();\n" %
                  package)
        fid.write("print([(req.key, req.specs) for req in reqs])\n")
    print("Getting dependencies...")
    try:
        output, _ = utils.execute(cmd3args, check_exit_code=True)
    finally:
        # Always remove the probe script, even on failure.
        os.unlink('__tmpfile__.py')
    # Safer than eval: the probe prints a plain list literal, and eval on
    # subprocess output would execute arbitrary expressions.
    deps = ast.literal_eval(output)
    depends = []
    for dep in deps:
        # dep is (name, specs). NOTE(review): pkg_resources specs are
        # (op, version) tuples, so dep[1][0] == '==' appears unreachable —
        # confirm the intended shape against the probe output.
        if len(dep[1]) == 2 and dep[1][0] == '==':
            depends.append(dep[0] + ' ' + dep[1][1])
        else:
            depends.append(dep[0])
    cmd4 = "conda remove -n _pipbuild_ --yes --all"
    utils.execute(cmd4.split())
    return depends
Ejemplo n.º 9
0
def mk_relative_linux(f, rpaths=('lib', )):
    """Set the ELF RPATH of build_prefix/*f* via patchelf.

    Entries in *rpaths* that are not absolute are rewritten relative to
    $ORIGIN; absolute entries pass through unchanged.
    """
    path = join(config.build_prefix, f)
    # The conditional applies per entry: relative dirs become
    # $ORIGIN-relative, absolute dirs are kept as-is.
    rpath = ':'.join('$ORIGIN/' +
                     utils.relative(f, d) if not d.startswith('/') else d
                     for d in rpaths)
    patchelf = external.find_executable('patchelf')
    print('patchelf: file: %s\n    setting rpath to: %s' % (path, rpath))
    utils.execute([patchelf, '--force-rpath', '--set-rpath', rpath, path])
Ejemplo n.º 10
0
Archivo: source.py Proyecto: Vasyka/hat
def hg_source(meta):
    """Fetch a Mercurial source: cache under HG_CACHE, clone to WORK_DIR.

    Updates the working copy to meta['hg_tag'] (default 'tip') and
    returns WORK_DIR. Exits when hg is not installed.
    """
    hg = external.find_executable('hg')
    if not hg:
        sys.exit('Error: hg not installed')

    repo_url = meta['hg_url']
    if not isdir(HG_CACHE):
        os.makedirs(HG_CACHE)

    # Cache directory name: URL tail with '/' flattened to '_'.
    cache_name = repo_url.split(':')[-1].replace('/', '_')
    cache_repo = join(HG_CACHE, cache_name)
    if not isdir(cache_repo):
        execute([hg, 'clone', repo_url, cache_repo], check_exit_code=True)
        assert isdir(cache_repo)
    else:
        execute([hg, 'pull'], cwd=cache_repo, check_exit_code=True)

    # Clone the cache into the work directory and pin the revision.
    revision = meta.get('hg_tag') or 'tip'
    print('checkout: %r' % revision)

    execute([hg, 'clone', cache_repo, WORK_DIR], check_exit_code=True)
    execute([hg, 'update', '-C', revision], cwd=WORK_DIR,
            check_exit_code=True)
    return WORK_DIR
Ejemplo n.º 11
0
def git_source(meta, recipe_dir):
    ''' Download a source from Git repo.

    Mirrors the repo into GIT_CACHE, then clones (recursively) into
    WORK_DIR and checks out meta['git_rev'] when given. Returns WORK_DIR.
    '''
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta['git_url']
    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        os.chdir(recipe_dir)
        git_dn = abspath(expanduser(git_url))
        git_dn = "_".join(git_dn.split(os.path.sep)[1:])
    else:
        git_dn = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)
    if sys.platform == 'win32':
        is_cygwin = 'cygwin' in git.lower()
        cache_repo_arg = cache_repo_arg.replace('\\', '/')
        if is_cygwin:
            # NOTE(review): hard-codes drive C when translating to a cygwin
            # path — looks wrong for caches on other drives; confirm.
            cache_repo_arg = '/cygdrive/c/' + cache_repo_arg[3:]

    # update (or create) the cache repo
    if isdir(cache_repo):
        execute([git, 'fetch'], cwd=cache_repo, check_exit_code=True)
    else:
        execute([git, 'clone', '--mirror', git_url, cache_repo_arg],
                cwd=recipe_dir,
                check_exit_code=True)
        assert isdir(cache_repo)

    # now clone into the work directory
    checkout = meta.get('git_rev')
    # if rev is not specified, and the git_url is local,
    # assume the user wants the current HEAD
    if not checkout and git_url.startswith('.'):
        # cwd=git_url is relative; works because of the chdir(recipe_dir)
        # in the relative-URL branch above.
        stdout, _ = execute(["git", "rev-parse", "HEAD"], cwd=git_url)
        checkout = stdout.strip()

    if checkout:
        print('checkout: %r' % checkout)

    execute([git, 'clone', '--recursive', cache_repo_arg, WORK_DIR],
            check_exit_code=True)
    if checkout:
        execute([git, 'checkout', checkout],
                cwd=WORK_DIR,
                check_exit_code=True)

    git_info()
    return WORK_DIR
Ejemplo n.º 12
0
Archivo: source.py Proyecto: Vasyka/hat
def git_source(meta, recipe_dir):
    ''' Download a source from Git repo.

    Keeps a mirror clone in GIT_CACHE, clones it recursively into
    WORK_DIR, optionally checks out meta['git_rev'], prints repo info,
    and returns WORK_DIR.
    '''
    if not isdir(GIT_CACHE):
        os.makedirs(GIT_CACHE)

    git = external.find_executable('git')
    if not git:
        sys.exit("Error: git is not installed")
    git_url = meta['git_url']
    if git_url.startswith('.'):
        # It's a relative path from the conda recipe
        os.chdir(recipe_dir)
        git_dn = abspath(expanduser(git_url))
        git_dn = "_".join(git_dn.split(os.path.sep)[1:])
    else:
        git_dn = git_url.split(':')[-1].replace('/', '_')
    cache_repo = cache_repo_arg = join(GIT_CACHE, git_dn)
    if sys.platform == 'win32':
        is_cygwin = 'cygwin' in git.lower()
        cache_repo_arg = cache_repo_arg.replace('\\', '/')
        if is_cygwin:
            # NOTE(review): assumes the cache lives on drive C when building
            # the cygwin-style path — verify for other drive letters.
            cache_repo_arg = '/cygdrive/c/' + cache_repo_arg[3:]

    # update (or create) the cache repo
    if isdir(cache_repo):
        execute([git, 'fetch'], cwd=cache_repo, check_exit_code=True)
    else:
        execute([git, 'clone', '--mirror', git_url, cache_repo_arg],
                cwd=recipe_dir, check_exit_code=True)
        assert isdir(cache_repo)

    # now clone into the work directory
    checkout = meta.get('git_rev')
    # if rev is not specified, and the git_url is local,
    # assume the user wants the current HEAD
    if not checkout and git_url.startswith('.'):
        # Relative cwd works because of the chdir(recipe_dir) above.
        stdout, _ = execute(["git", "rev-parse", "HEAD"], cwd=git_url)
        checkout = stdout.strip()

    if checkout:
        print('checkout: %r' % checkout)

    execute([git, 'clone', '--recursive', cache_repo_arg, WORK_DIR],
            check_exit_code=True)
    if checkout:
        execute([git, 'checkout', checkout],
                cwd=WORK_DIR, check_exit_code=True)

    git_info()
    return WORK_DIR
Ejemplo n.º 13
0
def human_filetype(path):
    """Return the human-readable Mach-O file type of *path* via `otool -h`.

    Looks up header field 4 in the FILETYPE table and drops its first
    three characters. Returns None when no header line is found.
    """
    output, _ = utils.execute(['otool', '-h', path], check_exit_code=True)
    lines = output.splitlines()
    assert lines[0].startswith(path), path

    for line in lines:
        if not line.strip().startswith('0x'):
            continue
        fields = line.split()
        # Field 4 of the header row is the numeric filetype.
        return FILETYPE[int(fields[4])][3:]
Ejemplo n.º 14
0
def otool(path):
    """Thin wrapper around ``otool -L``: list linked dylib install names."""
    output, _ = utils.execute(['otool', '-L', path], check_exit_code=True)
    lines = output.splitlines()
    assert lines[0].startswith(path), path
    # Every dependency line after the header is tab-indented; the first
    # whitespace-separated token is the install name.
    entries = lines[1:]
    assert all(entry[0] == '\t' for entry in entries), path
    return [entry.split()[0] for entry in entries]
Ejemplo n.º 15
0
Archivo: macho.py Proyecto: Vasyka/hat
def human_filetype(path):
    """Return the human-readable Mach-O file type of *path* via `otool -h`.

    Returns None when no header line (starting with '0x') is found.
    """
    output, _ = utils.execute(['otool', '-h', path], check_exit_code=True)
    lines = output.splitlines()
    assert lines[0].startswith(path), path

    for line in lines:
        if line.strip().startswith('0x'):
            header = line.split()
            # Field 4 of the header row is the numeric filetype.
            filetype = int(header[4])
            # [3:] drops a 3-char prefix from the FILETYPE name —
            # presumably 'MH_'; confirm against the FILETYPE table.
            return FILETYPE[filetype][3:]
Ejemplo n.º 16
0
Archivo: macho.py Proyecto: Vasyka/hat
def otool(path):
    "thin wrapper around otool -L"
    output, _ = utils.execute(['otool', '-L', path], check_exit_code=True)
    lines = output.splitlines()
    # First line echoes the file path; the rest are tab-indented deps.
    assert lines[0].startswith(path), path
    res = []
    for line in lines[1:]:
        assert line[0] == '\t', path
        # First whitespace-separated token is the dylib install name.
        res.append(line.split()[0])
    return res
Ejemplo n.º 17
0
Archivo: source.py Proyecto: Vasyka/hat
def apply_patch(src_dir, path):
    """Apply the patch file *path* inside *src_dir* using ``patch -p0``.

    On win32 the patch is first converted to unix line endings; the
    temporary converted copy is removed afterwards. Exits when the patch
    file or the patch executable is missing.
    """
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    patch = external.find_executable('patch')
    if patch is None:
        sys.exit("""\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, cygwin (Windows),
""" % (os.pathsep.join(external.dir_paths)))
    patch_args = ['-p0', '-i', path]
    if sys.platform == 'win32':
        # Replace the -i argument with a unix-line-ending copy.
        patch_args[-1] =  _ensure_unix_line_endings(path)

    execute([patch, ] + patch_args, cwd=src_dir, check_exit_code=True)
    if sys.platform == 'win32' and os.path.exists(patch_args[-1]):
        os.remove(patch_args[-1])  # clean up .patch_unix file
Ejemplo n.º 18
0
Archivo: macho.py Proyecto: Vasyka/hat
def get_rpaths(path):
    """Return the LC_RPATH entries reported by ``otool -l`` for *path*."""
    output, _ = utils.execute(['otool', '-l', path], check_exit_code=True)
    rpaths = []
    seen_rpath_cmd = False
    for line in output.splitlines():
        if 'cmd LC_RPATH' in line:
            seen_rpath_cmd = True
        if seen_rpath_cmd and 'path' in line:
            # line looks like "path <rpath> (offset N)"; take field 2.
            _, rpath, _ = line.split(None, 2)
            rpaths.append(rpath)
    return rpaths
Ejemplo n.º 19
0
def get_rpaths(path):
    """Return the LC_RPATH entries reported by `otool -l` for *path*."""
    output, _ = utils.execute(['otool', '-l', path], check_exit_code=True)
    lines = output.splitlines()
    check_for_rpath = False
    rpaths = []
    for line in lines:
        if 'cmd LC_RPATH' in line:
            # NOTE(review): this flag is never reset, so every later
            # 'path' line is collected — confirm that is intended.
            check_for_rpath = True
        if check_for_rpath and 'path' in line:
            # line looks like "path <rpath> (offset N)"; take field 2.
            _, rpath, _ = line.split(None, 2)
            rpaths.append(rpath)
    return rpaths
Ejemplo n.º 20
0
def apply_patch(src_dir, path):
    """Apply the patch file *path* inside *src_dir* with ``patch -p0``.

    On win32 the patch is first rewritten with unix line endings; that
    temporary copy is removed afterwards.
    """
    print('Applying patch: %r' % path)
    if not isfile(path):
        sys.exit('Error: no such patch: %s' % path)

    patch_exe = external.find_executable('patch')
    if patch_exe is None:
        sys.exit("""\
Error:
    Did not find 'patch' in: %s
    You can install 'patch' using apt-get, yum (Linux), Xcode (MacOSX),
    or conda, cygwin (Windows),
""" % (os.pathsep.join(external.dir_paths)))
    patch_file = path
    if sys.platform == 'win32':
        patch_file = _ensure_unix_line_endings(path)

    execute([patch_exe, '-p0', '-i', patch_file],
            cwd=src_dir, check_exit_code=True)
    if sys.platform == 'win32' and os.path.exists(patch_file):
        os.remove(patch_file)  # clean up the .patch_unix copy
Ejemplo n.º 21
0
def build_package(package, version=None, noarch_python=False):
    """Recursively build a conda package (and its dependencies) from pip.

    Returns the build's exit code (0 on success); the recipe directory is
    always removed on the way out.
    """
    if ' ' in package:
        # "name version" form: split into the two parts.
        package, version = package.split(' ')
    try:
        directory = build_recipe(package, version=version)
        dependencies = convert_recipe(directory,
                                      package,
                                      noarch_python=noarch_python)
    except RuntimeError:
        # Fall back to a hand-made recipe when skeleton/convert fails.
        directory, dependencies = make_recipe(package,
                                              version,
                                              noarch_python=noarch_python)

    return_code = 0

    try:
        print("package = %s" % package)
        print("   dependencies = %s" % dependencies)
        # Dependencies will be either package_name or
        #  package_name version_number
        # Only == dependency specs get version numbers
        # All else are just handled without a version spec
        for depend in dependencies:
            build_package(depend)
        args = build_template.format(directory).split()
        print("Building conda package for {0}".format(package.lower()))

        try:
            utils.execute(args, check_exit_code=True)
        except subprocess.CalledProcessError as exc:
            # NOTE(review): stock CalledProcessError exposes `returncode`,
            # not `return_code` — works only if utils.execute raises a
            # variant with that attribute; confirm.
            return_code = exc.return_code
        else:
            m = MetaData(directory)
            handle_binstar_upload(build.bldpkg_path(m))
    finally:
        rm_rf(directory)
    return return_code
Ejemplo n.º 22
0
def get_latest_git_tag():
    """Return the last tag listed by `git tag` in source.WORK_DIR.

    Exits the process when the command fails or no tags exist.
    NOTE(review): `git tag` lists tags lexically, so the last entry is
    not necessarily the newest release — confirm intended.
    """
    return_code = 0
    try:
        stdout, stderr = utils.execute(['git', 'tag'], cwd=source.WORK_DIR)
    except subprocess.CalledProcessError as exc:
        # NOTE(review): stock CalledProcessError has `returncode` and a
        # single `output` value; this assumes utils.execute raises a
        # variant carrying (stdout, stderr) and `return_code` — confirm.
        return_code = exc.return_code
        stdout, stderr = exc.output

    if stderr or return_code:
        sys.exit("Error: git tag failed (%s)" % stderr)
    tags = stdout.strip().splitlines()
    if not tags:
        sys.exit("Error: no tags found")

    print("Using tag %s" % tags[-1])
    return tags[-1]
Ejemplo n.º 23
0
def get_latest_git_tag():
    """Return the final entry of `git tag` output for source.WORK_DIR.

    Exits on command failure or when there are no tags.
    NOTE(review): `git tag` output is sorted lexically, so "latest" here
    means alphabetically last, not newest — confirm intended.
    """
    return_code = 0
    try:
        stdout, stderr = utils.execute(['git', 'tag'], cwd=source.WORK_DIR)
    except subprocess.CalledProcessError as exc:
        # NOTE(review): these attribute accesses assume utils.execute
        # raises a customized CalledProcessError with `return_code` and a
        # (stdout, stderr) `output` tuple — verify against utils.execute.
        return_code = exc.return_code
        stdout, stderr = exc.output

    if stderr or return_code:
        sys.exit("Error: git tag failed (%s)" % stderr)
    tags = stdout.strip().splitlines()
    if not tags:
        sys.exit("Error: no tags found")

    print("Using tag %s" % tags[-1])
    return tags[-1]
Ejemplo n.º 24
0
Archivo: macho.py Proyecto: Vasyka/hat
def install_name_change(path, cb_func):
    """
    Change dynamic shared library install names of Mach-O binary `path`.

    `cb_func` is a callback which is called for each shared library install
    name with `path` and the current name; it returns the new name (or None
    if the name should be unchanged). Returns False when a Mach-O stub file
    was skipped, True otherwise.
    """
    changes = []
    for link in otool(path):
        # The first link may be the install name of the library itself, but
        # this isn't a big deal because install_name_tool -change is a no-op
        # if given a dependent install name that doesn't exist.
        new_link = cb_func(path, link)
        if new_link:
            changes.append((link, new_link))

    ret = True
    for old, new in changes:
        return_code = 0
        args = ['install_name_tool', '-change', old, new, path]
        print(' '.join(args))

        try:
            stdout, stderr = utils.execute(args, check_exit_code=True)
        except subprocess.CalledProcessError as exc:
            # NOTE(review): stock CalledProcessError carries a single
            # `output` value and `returncode`; unpacking `exc.output` into
            # (stdout, stderr) and reading `exc.return_code` assumes
            # utils.execute raises a customized error — confirm.
            stdout, stderr = exc.output
            return_code = exc.return_code

        if "Mach-O dynamic shared library stub file" in stderr:
            print("Skipping Mach-O dynamic shared library stub file %s" % path)
            ret = False
            continue
        else:
            print(stderr, file=sys.stderr)

        if return_code:
            raise RuntimeError("install_name_tool failed with exit "
                               "status %d" % return_code)

    return ret
Ejemplo n.º 25
0
def install_name_change(path, cb_func):
    """
    Change dynamic shared library install names of Mach-O binary `path`.

    `cb_func` is called once per shared library install name with `path`
    and the current name; it returns the replacement name (or None to keep
    the existing one). Returns False when a stub file was skipped, else True.
    """
    changes = []
    for link in otool(path):
        # The first link may be the install name of the library itself, but
        # this isn't a big deal because install_name_tool -change is a no-op
        # if given a dependent install name that doesn't exist.
        new_link = cb_func(path, link)
        if new_link:
            changes.append((link, new_link))

    ret = True
    for old, new in changes:
        return_code = 0
        args = ['install_name_tool', '-change', old, new, path]
        print(' '.join(args))

        try:
            stdout, stderr = utils.execute(args, check_exit_code=True)
        except subprocess.CalledProcessError as exc:
            # NOTE(review): assumes utils.execute raises a customized
            # CalledProcessError whose `output` is a (stdout, stderr) tuple
            # and which has a `return_code` attribute — verify.
            stdout, stderr = exc.output
            return_code = exc.return_code

        if "Mach-O dynamic shared library stub file" in stderr:
            print("Skipping Mach-O dynamic shared library stub file %s" % path)
            ret = False
            continue
        else:
            print(stderr, file=sys.stderr)

        if return_code:
            raise RuntimeError("install_name_tool failed with exit "
                               "status %d" % return_code)

    return ret
Ejemplo n.º 26
0
def hg_source(meta):
    ''' Download a source from Mercurial repo.

    Pulls (or clones) meta['hg_url'] into HG_CACHE, then clones the cache
    into WORK_DIR and updates to meta['hg_tag'] (default 'tip').
    Returns WORK_DIR.
    '''
    hg = external.find_executable('hg')
    if not hg:
        sys.exit('Error: hg not installed')
    hg_url = meta['hg_url']
    if not isdir(HG_CACHE):
        os.makedirs(HG_CACHE)
    # Filesystem-safe cache directory name derived from the URL.
    hg_dn = hg_url.split(':')[-1].replace('/', '_')
    cache_repo = join(HG_CACHE, hg_dn)
    if isdir(cache_repo):
        execute([hg, 'pull'], cwd=cache_repo, check_exit_code=True)
    else:
        execute([hg, 'clone', hg_url, cache_repo], check_exit_code=True)
        assert isdir(cache_repo)

    # now clone in to work directory
    update = meta.get('hg_tag') or 'tip'
    print('checkout: %r' % update)

    execute([hg, 'clone', cache_repo, WORK_DIR], check_exit_code=True)
    execute([hg, 'update', '-C', update], cwd=WORK_DIR, check_exit_code=True)
    return WORK_DIR
Ejemplo n.º 27
0
Archivo: source.py Proyecto: Vasyka/hat
def git_info(fo=None):
    """Print (or write to file-like *fo*) log/describe/status for WORK_DIR."""
    assert isdir(WORK_DIR)

    # Ensure to explicitly set GIT_DIR as some Linux machines will not
    # properly execute without it.
    env = os.environ.copy()
    env['GIT_DIR'] = join(WORK_DIR, '.git')
    env = {str(k): str(v) for k, v in env.items()}

    commands = (
        ('git log -n1', True),
        ('git describe --tags --dirty', False),
        ('git status', True),
    )
    for cmd, strict in commands:
        stdout, stderr = execute(cmd.split(), cwd=WORK_DIR, env=env,
                                 check_exit_code=strict)
        if strict and stderr and stderr.strip():
            raise Exception("git error: %s" % stderr)
        header = u'==> %s <==\n' % cmd
        if fo:
            fo.write(header)
            fo.write(stdout + u'\n')
        else:
            print(header)
            print(stdout + u'\n')
Ejemplo n.º 28
0
def git_info(fo=None):
    ''' Print info about a Git repo.

    Runs `git log -n1`, `git describe --tags --dirty`, and `git status`
    inside WORK_DIR, writing output to file-like *fo* when given,
    otherwise printing to stdout. Raises on stderr from strict commands.
    '''
    assert isdir(WORK_DIR)

    # Ensure to explicitly set GIT_DIR as some Linux machines will not
    # properly execute without it.
    env = os.environ.copy()
    env['GIT_DIR'] = join(WORK_DIR, '.git')
    # Normalize to plain str keys/values for the subprocess environment.
    env = {str(key): str(value) for key, value in env.items()}
    for cmd, check_error in [('git log -n1', True),
                             ('git describe --tags --dirty', False),
                             ('git status', True)]:
        stdout, stderr = execute(cmd.split(),
                                 cwd=WORK_DIR,
                                 env=env,
                                 check_exit_code=check_error)
        if check_error and stderr and stderr.strip():
            raise Exception("git error: %s" % stderr)
        if fo:
            fo.write(u'==> %s <==\n' % cmd)
            fo.write(stdout + u'\n')
        else:
            print(u'==> %s <==\n' % cmd)
            print(stdout + u'\n')
Ejemplo n.º 29
0
def build_recipe(package, version=None):
    """Run the conda skeleton command for *package* (optionally pinned to
    *version*) and return the absolute path of the generated recipe dir.

    Raises RuntimeError when the command fails or its output does not end
    with the expected 'Done' marker.
    """
    if version:
        dirname = package.lower() + "-" + version
    else:
        dirname = package.lower()
    if os.path.isdir(dirname):
        # Start from a clean slate.
        rm_rf(dirname)
    if version is None:
        args = skeleton_template.format(package).split()
    else:
        args = skeleton_template_wversion.format(package, version).split()
    print("Creating standard recipe for {0}".format(dirname))
    try:
        # utils.execute returns (stdout, stderr) at every other call site
        # in this module; the original bound the whole tuple and then
        # called .strip() on it, which raises AttributeError.
        result, _ = utils.execute(args, check_exit_code=True)
    except subprocess.CalledProcessError as err:
        print(err.output)
        raise RuntimeError((" ".join(args)))

    output = result.strip().splitlines()
    # Expected output ends with 'Done'; the recipe dir is the last word of
    # the line before it.
    if output[-1] == 'Done':
        direc = output[-2].split()[-1]
    else:
        raise RuntimeError("Incorrect output from build_recipe: %s" % output)
    return os.path.abspath(direc)
Ejemplo n.º 30
0
def build_recipe(package, version=None):
    """Generate a conda recipe for *package* via the skeleton template and
    return the absolute path of the recipe directory.

    Raises RuntimeError when the skeleton command fails or its output does
    not end with the 'Done' marker.
    """
    if version:
        dirname = package.lower() + "-" + version
    else:
        dirname = package.lower()
    if os.path.isdir(dirname):
        # Remove any stale recipe directory first.
        rm_rf(dirname)
    if version is None:
        args = skeleton_template.format(package).split()
    else:
        args = skeleton_template_wversion.format(package, version).split()
    print("Creating standard recipe for {0}".format(dirname))
    try:
        # Unpack (stdout, stderr) — utils.execute returns a 2-tuple at all
        # other call sites; calling .strip() on the tuple (as the original
        # did) raises AttributeError.
        result, _ = utils.execute(args, check_exit_code=True)
    except subprocess.CalledProcessError as err:
        print(err.output)
        raise RuntimeError((" ".join(args)))

    output = result.strip().splitlines()
    # The recipe directory is the last word of the line before 'Done'.
    if output[-1] == 'Done':
        direc = output[-2].split()[-1]
    else:
        raise RuntimeError("Incorrect output from build_recipe: %s" % output)
    return os.path.abspath(direc)
Ejemplo n.º 31
0
def main(args, parser):
    """Generate conda recipes for R packages from CRAN or GitHub.

    Pops each entry off ``args.packages`` (recursively appending
    unsatisfied R dependencies when ``args.recursive``), collects its
    metadata — from the CRAN index, or from the DESCRIPTION file of a
    git checkout for github.com URLs — and writes meta.yaml, build.sh
    and bld.bat into ``<output_dir>/r-<package>``.
    """
    if len(args.packages) > 1 and args.version_compare:
        parser.error("--version-compare only works with one package at a time")
    if not args.update_outdated and not args.packages:
        parser.error("At least one package must be supplied")

    package_dicts = {}

    # args.output_dir is a single-element list; unpack the string.
    [output_dir] = args.output_dir

    cran_metadata = get_cran_metadata(args.cran_url, output_dir)

    if args.update_outdated:
        args.packages = get_outdated(output_dir, cran_metadata, args.packages)
        for pkg in args.packages:
            # BUG FIX: join() was being passed the args.output_dir *list*;
            # use the unpacked output_dir string.
            rm_rf(join(output_dir, 'r-' + pkg))

    while args.packages:
        package = args.packages.pop()

        is_github_url = 'github.com' in package
        url = package

        if is_github_url:
            rm_rf(source.WORK_DIR)
            source.git_source({'git_url': package}, '.')
            git_tag = args.git_tag[0] if args.git_tag else get_latest_git_tag()
            return_code = 0
            try:
                stdout, stderr = utils.execute(['git', 'checkout', git_tag],
                                               cwd=source.WORK_DIR)
            except subprocess.CalledProcessError as exc:
                # NOTE(review): stdlib CalledProcessError exposes
                # .returncode and .output (stdout only) — confirm
                # utils.execute raises a variant providing .return_code
                # and a (stdout, stderr) tuple in .output.
                return_code = exc.return_code
                stdout, stderr = exc.output

            if return_code:
                sys.exit(
                    "Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                    (git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            # Locate DESCRIPTION; some repositories keep the R package in
            # a subdirectory ('pkg' or the repository name).
            DESCRIPTION = join(source.WORK_DIR, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(source.WORK_DIR, 'pkg',
                                           "DESCRIPTION")
                sub_description_name = join(source.WORK_DIR,
                                            package.split('/')[-1],
                                            "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit(
                        "%s does not appear to be a valid R package (no DESCRIPTION file)"
                        % package)

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(
                remove_package_line_continuations(
                    description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d

        if package.startswith('r-'):
            package = package[2:]
        if package.endswith('/'):
            package = package[:-1]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(args.cran_url, package, session))

        dir_path = join(output_dir, 'r-' + package.lower())
        if exists(dir_path) and not args.version_compare:
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(
            package,
            {
                'cran_packagename': package,
                'packagename': 'r-' + package.lower(),
                'build_depends': '',
                'run_depends': '',
                # CRAN doesn't seem to have this metadata :(
                'home_comment': '#',
                'homeurl': '',
                'summary_comment': '#',
                'summary': '',
            })

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = git_tag
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''

        if args.version:
            raise NotImplementedError(
                "Package versions from CRAN are not yet implemented")
            [version] = args.version
            d['version'] = version

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if args.version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        if not is_github_url:
            d['filename'] = "{cran_packagename}_{cran_version}.tar.gz".format(
                **d)
            if args.archive:
                d['cranurl'] = (INDENT + args.cran_url + 'src/contrib/' +
                                d['filename'] + INDENT + args.cran_url +
                                'src/contrib/' + 'Archive/' +
                                d['cran_packagename'] + '/' + d['filename'])
            else:
                d['cranurl'] = ' ' + args.cran_url + 'src/contrib/' + d[
                    'filename']

        d['cran_metadata'] = '\n'.join(
            ['# %s' % l for l in cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use', None) == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [
            s.strip() for s in cran_package.get('Depends', '').split(',')
            if s.strip()
        ]
        imports = [
            s.strip() for s in cran_package.get('Imports', '').split(',')
            if s.strip()
        ]
        links = [
            s.strip() for s in cran_package.get("LinkingTo", '').split(',')
            if s.strip()
        ]

        dep_dict = {}

        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" %
                         (package, s))
            name = match.group('name')
            archs = match.group('archs')
            relop = match.group('relop') or ''
            version = match.group('version') or ''
            version = version.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or version

            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))

            dep_dict[name] = '{relop}{version}'.format(relop=relop,
                                                       version=version)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        for dep_type in ['build', 'run']:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first
                    if d['cran_packagename'] in R_RECOMMENDED_PACKAGE_NAMES and dep_type == 'build':
                        # On Linux and OS X, r is a metapackage depending on
                        # r-base and r-recommended. Recommended packages cannot
                        # build depend on r as they would then build depend on
                        # themselves and the built package would end up being
                        # empty (because conda would find no new files)
                        r_name = 'r-base'
                    else:
                        r_name = 'r'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version
                    deps.insert(
                        0, '{indent}{r_name}'.format(indent=INDENT,
                                                     r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()

                    # The r package on Windows includes the recommended packages
                    if name in R_RECOMMENDED_PACKAGE_NAMES:
                        end = ' # [not win]'
                    else:
                        end = ''
                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}{end}'.format(
                            name=conda_name,
                            version=dep_dict[name],
                            end=end,
                            indent=INDENT))
                    else:
                        deps.append('{indent}{name}{end}'.format(
                            name=conda_name, indent=INDENT, end=end))
                    if args.recursive:
                        if not exists(join(output_dir, conda_name)):
                            args.packages.append(name)

            if cran_package.get("NeedsCompilation", 'no') == 'yes':
                if dep_type == 'build':
                    deps.append(
                        '{indent}gcc # [not win]'.format(indent=INDENT))
                else:
                    deps.append(
                        '{indent}libgcc # [not win]'.format(indent=INDENT))
            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']

        # Normalize the metadata values to plain ASCII.
        # BUG FIX: dict.iteritems() does not exist on Python 3 (this
        # function already uses print(..., file=...)); items() works on
        # both Python 2 and 3.
        d = {
            k: unicodedata.normalize("NFKD", compat.text_type(v)).encode(
                'ascii', 'ignore')
            for k, v in d.items()
        }

        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")
Ejemplo n.º 32
0
Archivo: post.py Proyecto: Vasyka/hat
def mk_relative_osx(path, build_prefix=None):
    '''
    Rewrite the Mach-O install name and add an @loader_path rpath so the
    object at *path* is relocatable.

    if build_prefix is None, then this is a standard conda build. The path
    and all dependencies are in the build_prefix.

    if package is built in develop mode, build_prefix is specified. Object
    specified by 'path' needs to relink runtime dependences to libs found in
    build_prefix/lib/. Also, in develop mode, 'path' is not in 'build_prefix'
    '''
    if build_prefix is None:
        assert path.startswith(config.build_prefix + '/')
    else:
        config.short_build_prefix = build_prefix

    assert sys.platform == 'darwin' and is_obj(path)
    s = macho.install_name_change(path, osx_ch_link)

    names = macho.otool(path)
    if names:
        # Strictly speaking, not all object files have install names (e.g.,
        # bundles and executables do not). In that case, the first name here
        # will not be the install name (i.e., the id), but it isn't a problem,
        # because in that case it will be a no-op (with the exception of stub
        # files, which give an error, which is handled below).
        args = [
            'install_name_tool',
            '-id',
            join('@rpath',
                 relpath(dirname(path), join(config.build_prefix, 'lib')),
                 basename(names[0])),
            path,
        ]
        print(' '.join(args))
        return_code = 0
        try:
            stdout, stderr = utils.execute(args)
        except subprocess.CalledProcessError as exc:
            # NOTE(review): stdlib CalledProcessError exposes .returncode
            # and .output (stdout only) — confirm utils.execute raises a
            # variant with .return_code and a (stdout, stderr) tuple.
            stdout, stderr = exc.output
            return_code = exc.return_code

        if "Mach-O dynamic shared library stub file" in stderr:
            print("Skipping Mach-O dynamic shared library stub file %s" % path)
            return

        else:
            print(stderr, file=sys.stderr)
            if return_code:
                raise RuntimeError("install_name_tool failed with exit "
                                   "status %d" % return_code)

        # Add an rpath to every executable to increase the chances of it
        # being found.
        args = [
            'install_name_tool',
            '-add_rpath',
            join('@loader_path',
                 relpath(join(config.build_prefix, 'lib'), dirname(path)),
                 '').replace('/./', '/'),
            path,
        ]
        print(' '.join(args))
        return_code = 0
        try:
            # BUG FIX: this previously unpacked into a misspelled
            # 'strerr', so on success the checks below silently inspected
            # the *previous* install_name_tool invocation's stderr.
            stdout, stderr = utils.execute(args)
        except subprocess.CalledProcessError as exc:
            stdout, stderr = exc.output
            return_code = exc.return_code

        if "Mach-O dynamic shared library stub file" in stderr:
            print("Skipping Mach-O dynamic shared library stub file %s\n" % path)
            return
        elif "would duplicate path, file already has LC_RPATH for:" in stderr:
            print("Skipping -add_rpath, file already has LC_RPATH set")
            return
        else:
            print(stderr, file=sys.stderr)
            if return_code:
                raise RuntimeError("install_name_tool failed with exit "
                                   "status %d" % return_code)

    if s:
        # Skip for stub files, which have to use binary_has_prefix_files to be
        # made relocatable.
        assert_relative_osx(path)
Ejemplo n.º 33
0
def mk_relative_osx(path, build_prefix=None):
    '''
    Rewrite the Mach-O install name and add an @loader_path rpath so the
    object at *path* is relocatable.

    if build_prefix is None, then this is a standard conda build. The path
    and all dependencies are in the build_prefix.

    if package is built in develop mode, build_prefix is specified. Object
    specified by 'path' needs to relink runtime dependences to libs found in
    build_prefix/lib/. Also, in develop mode, 'path' is not in 'build_prefix'
    '''
    if build_prefix is None:
        assert path.startswith(config.build_prefix + '/')
    else:
        config.short_build_prefix = build_prefix

    assert sys.platform == 'darwin' and is_obj(path)
    s = macho.install_name_change(path, osx_ch_link)

    names = macho.otool(path)
    if names:
        # Strictly speaking, not all object files have install names (e.g.,
        # bundles and executables do not). In that case, the first name here
        # will not be the install name (i.e., the id), but it isn't a problem,
        # because in that case it will be a no-op (with the exception of stub
        # files, which give an error, which is handled below).
        args = [
            'install_name_tool',
            '-id',
            join('@rpath',
                 relpath(dirname(path), join(config.build_prefix, 'lib')),
                 basename(names[0])),
            path,
        ]
        print(' '.join(args))
        return_code = 0
        try:
            stdout, stderr = utils.execute(args)
        except subprocess.CalledProcessError as exc:
            # NOTE(review): stdlib CalledProcessError exposes .returncode
            # and .output (stdout only) — confirm utils.execute raises a
            # variant with .return_code and a (stdout, stderr) tuple.
            stdout, stderr = exc.output
            return_code = exc.return_code

        if "Mach-O dynamic shared library stub file" in stderr:
            print("Skipping Mach-O dynamic shared library stub file %s" % path)
            return

        else:
            print(stderr, file=sys.stderr)
            if return_code:
                raise RuntimeError("install_name_tool failed with exit "
                                   "status %d" % return_code)

        # Add an rpath to every executable to increase the chances of it
        # being found.
        args = [
            'install_name_tool',
            '-add_rpath',
            join('@loader_path',
                 relpath(join(config.build_prefix, 'lib'), dirname(path)),
                 '').replace('/./', '/'),
            path,
        ]
        print(' '.join(args))
        return_code = 0
        try:
            # BUG FIX: this previously unpacked into a misspelled
            # 'strerr', so on success the checks below silently inspected
            # the *previous* install_name_tool invocation's stderr.
            stdout, stderr = utils.execute(args)
        except subprocess.CalledProcessError as exc:
            stdout, stderr = exc.output
            return_code = exc.return_code

        if "Mach-O dynamic shared library stub file" in stderr:
            print("Skipping Mach-O dynamic shared library stub file %s\n" %
                  path)
            return
        elif "would duplicate path, file already has LC_RPATH for:" in stderr:
            print("Skipping -add_rpath, file already has LC_RPATH set")
            return
        else:
            print(stderr, file=sys.stderr)
            if return_code:
                raise RuntimeError("install_name_tool failed with exit "
                                   "status %d" % return_code)

    if s:
        # Skip for stub files, which have to use binary_has_prefix_files to be
        # made relocatable.
        assert_relative_osx(path)
Ejemplo n.º 34
0
def main(args, parser):
    """Generate conda recipes for R packages from CRAN or GitHub.

    Pops each entry off ``args.packages`` (recursively appending
    unsatisfied R dependencies when ``args.recursive``), collects its
    metadata — from the CRAN index, or from the DESCRIPTION file of a
    git checkout for github.com URLs — and writes meta.yaml, build.sh
    and bld.bat into ``<output_dir>/r-<package>``.
    """
    if len(args.packages) > 1 and args.version_compare:
        parser.error("--version-compare only works with one package at a time")
    if not args.update_outdated and not args.packages:
        parser.error("At least one package must be supplied")

    package_dicts = {}

    # args.output_dir is a single-element list; unpack the string.
    [output_dir] = args.output_dir

    cran_metadata = get_cran_metadata(args.cran_url, output_dir)

    if args.update_outdated:
        args.packages = get_outdated(output_dir, cran_metadata, args.packages)
        for pkg in args.packages:
            # BUG FIX: join() was being passed the args.output_dir *list*;
            # use the unpacked output_dir string.
            rm_rf(join(output_dir, 'r-' + pkg))

    while args.packages:
        package = args.packages.pop()

        is_github_url = 'github.com' in package
        url = package

        if is_github_url:
            rm_rf(source.WORK_DIR)
            source.git_source({'git_url': package}, '.')
            git_tag = args.git_tag[0] if args.git_tag else get_latest_git_tag()
            return_code = 0
            try:
                stdout, stderr = utils.execute(['git', 'checkout', git_tag],
                                               cwd=source.WORK_DIR)
            except subprocess.CalledProcessError as exc:
                # NOTE(review): stdlib CalledProcessError exposes
                # .returncode and .output (stdout only) — confirm
                # utils.execute raises a variant providing .return_code
                # and a (stdout, stderr) tuple in .output.
                return_code = exc.return_code
                stdout, stderr = exc.output

            if return_code:
                sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" % (git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            # Locate DESCRIPTION; some repositories keep the R package in
            # a subdirectory ('pkg' or the repository name).
            DESCRIPTION = join(source.WORK_DIR, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(source.WORK_DIR, 'pkg', "DESCRIPTION")
                sub_description_name = join(source.WORK_DIR, package.split('/')[-1], "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package (no DESCRIPTION file)" % package)

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d

        if package.startswith('r-'):
            package = package[2:]
        if package.endswith('/'):
            package = package[:-1]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(get_package_metadata(args.cran_url,
            package, session))

        dir_path = join(output_dir, 'r-' + package.lower())
        if exists(dir_path) and not args.version_compare:
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(package,
            {
                'cran_packagename': package,
                'packagename': 'r-' + package.lower(),
                'build_depends': '',
                'run_depends': '',
                # CRAN doesn't seem to have this metadata :(
                'home_comment': '#',
                'homeurl': '',
                'summary_comment': '#',
                'summary': '',
            })

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = git_tag
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''

        if args.version:
            raise NotImplementedError("Package versions from CRAN are not yet implemented")
            [version] = args.version
            d['version'] = version

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if args.version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        if not is_github_url:
            d['filename'] = "{cran_packagename}_{cran_version}.tar.gz".format(**d)
            if args.archive:
                d['cranurl'] = (INDENT + args.cran_url + 'src/contrib/' +
                    d['filename'] + INDENT + args.cran_url + 'src/contrib/' +
                    'Archive/' + d['cran_packagename'] + '/' + d['filename'])
            else:
                d['cranurl'] = ' ' + args.cran_url + 'src/contrib/' + d['filename']

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in
            cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use', None) == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [s.strip() for s in cran_package.get('Depends',
            '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports',
            '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo",
            '').split(',') if s.strip()]

        dep_dict = {}

        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" %
                    (package, s))
            name = match.group('name')
            archs = match.group('archs')
            relop = match.group('relop') or ''
            version = match.group('version') or ''
            version = version.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or version

            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                "package %s: %s" % (package, s))

            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=version)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        for dep_type in ['build', 'run']:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first
                    if d['cran_packagename'] in R_RECOMMENDED_PACKAGE_NAMES and dep_type == 'build':
                        # On Linux and OS X, r is a metapackage depending on
                        # r-base and r-recommended. Recommended packages cannot
                        # build depend on r as they would then build depend on
                        # themselves and the built package would end up being
                        # empty (because conda would find no new files)
                        r_name = 'r-base'
                    else:
                        r_name = 'r'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version
                    deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()

                    # The r package on Windows includes the recommended packages
                    if name in R_RECOMMENDED_PACKAGE_NAMES:
                        end = ' # [not win]'
                    else:
                        end = ''
                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}{end}'.format(name=conda_name,
                            version=dep_dict[name], end=end, indent=INDENT))
                    else:
                        deps.append('{indent}{name}{end}'.format(name=conda_name,
                            indent=INDENT, end=end))
                    if args.recursive:
                        if not exists(join(output_dir, conda_name)):
                            args.packages.append(name)

            if cran_package.get("NeedsCompilation", 'no') == 'yes':
                if dep_type == 'build':
                    deps.append('{indent}gcc # [not win]'.format(indent=INDENT))
                else:
                    deps.append('{indent}libgcc # [not win]'.format(indent=INDENT))
            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']

        # Normalize the metadata values to plain ASCII.
        # BUG FIX: dict.iteritems() does not exist on Python 3 (this
        # function already uses print(..., file=...)); items() works on
        # both Python 2 and 3.
        d = {k: unicodedata.normalize("NFKD", compat.text_type(v)).encode('ascii', 'ignore')
             for k, v in d.items()}

        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")