def skeletonize(packages, output_dir=".", version=None, git_tag=None,
                cran_url="https://cran.r-project.org/", recursive=False, archive=True,
                version_compare=False, update_outdated=False, config=None):
    if not config:
        config = Config()

    if len(packages) > 1 and version_compare:
        raise ValueError("--version-compare only works with one package at a time")
    if not update_outdated and not packages:
        raise ValueError("At least one package must be supplied")

    package_dicts = {}

    cran_metadata = get_cran_metadata(cran_url, output_dir)

    if update_outdated:
        packages = get_outdated(output_dir, cran_metadata, packages)
        for pkg in packages:
            rm_rf(join(output_dir, 'r-' + pkg))

    while packages:
        package = packages.pop()

        is_github_url = 'github.com' in package
        url = package

        if is_github_url:
            rm_rf(config.work_dir)
            m = metadata.MetaData.fromdict({'source': {'git_url': package}}, config=config)
            source.git_source(m.get_section('source'), m.config.git_cache, m.config.work_dir)
            git_tag = git_tag[0] if git_tag else get_latest_git_tag(config)
            p = subprocess.Popen(['git', 'checkout', git_tag], stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, cwd=config.work_dir)
            stdout, stderr = p.communicate()
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')
            if p.returncode:
                sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                         (git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            DESCRIPTION = join(config.work_dir, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(config.work_dir, 'pkg', "DESCRIPTION")
                sub_description_name = join(config.work_dir, package.split('/')[-1],
                                            "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file in %s, %s)" %
                             (package, sub_description_pkg, sub_description_name))

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d

        if package.startswith('r-'):
            package = package[2:]
        if package.endswith('/'):
            package = package[:-1]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(cran_url, package, session))

        dir_path = join(output_dir, 'r-' + package.lower())
        if exists(dir_path) and not version_compare:
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(package, {
            'cran_packagename': package,
            'packagename': 'r-' + package.lower(),
            'build_depends': '',
            'run_depends': '',
            # CRAN doesn't seem to have this metadata :(
            'home_comment': '#',
            'homeurl': '',
            'summary_comment': '#',
            'summary': '',
        })

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['hash_entry'] = '# You can add a hash for the file here, like md5, sha1 or sha256'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = git_tag
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''
            d['hash_entry'] = ''

        if version:
            d['version'] = version
            raise NotImplementedError("Package versions from CRAN are not yet implemented")

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        if not is_github_url:
            filename = '{}_{}.tar.gz'
            contrib_url = cran_url + 'src/contrib/'
            package_url = contrib_url + filename.format(package, d['cran_version'])

            # Calculate the sha256 by downloading the source
            sha256 = hashlib.sha256()
            print("Downloading source from {}".format(package_url))
            sha256.update(urlopen(package_url).read())
            d['hash_entry'] = 'sha256: {}'.format(sha256.hexdigest())

            d['filename'] = filename.format(package, '{{ version }}')
            if archive:
                d['cranurl'] = (INDENT + contrib_url + d['filename'] + INDENT +
                                contrib_url + 'Archive/{}/'.format(package) + d['filename'])
            else:
                d['cranurl'] = ' ' + cran_url + 'src/contrib/' + d['filename']

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        d['license_family'] = guess_license_family(d['license'], allowed_license_families)

        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use') == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])
        else:
            # Use the CRAN page as the homepage if nothing has been specified
            d['home_comment'] = ''
            d['homeurl'] = ' https://CRAN.R-project.org/package={}'.format(package)

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [s.strip() for s in cran_package.get('Depends', '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports', '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", '').split(',') if s.strip()]

        dep_dict = {}
        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" % (package, s))
            name = match.group('name')
            archs = match.group('archs')
            relop = match.group('relop') or ''
            version = match.group('version') or ''
            version = version.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or version
            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))
            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=version)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        for dep_type in ['build', 'run']:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first.  Regardless of build or run, and whether this
                    # is a recommended package or not, it can only depend on
                    # 'r-base', since anything else can and will cause cycles in
                    # the dependency graph.  The CRAN metadata lists all
                    # dependencies anyway, even those packages that are in the
                    # recommended group.
                    r_name = 'r-base'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version.
                    deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()
                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}'.format(
                            name=conda_name, version=dep_dict[name], indent=INDENT))
                    else:
                        deps.append('{indent}{name}'.format(name=conda_name, indent=INDENT))
                    if recursive:
                        if not exists(join(output_dir, conda_name)):
                            packages.append(name)

            if cran_package.get("NeedsCompilation", 'no') == 'yes':
                if dep_type == 'build':
                    deps.append('{indent}posix                # [win]'.format(indent=INDENT))
                    deps.append('{indent}{{{{native}}}}toolchain  # [win]'.format(indent=INDENT))
                    deps.append('{indent}gcc                  # [not win]'.format(indent=INDENT))
                elif dep_type == 'run':
                    deps.append('{indent}{{{{native}}}}gcc-libs   # [win]'.format(indent=INDENT))
                    deps.append('{indent}libgcc               # [not win]'.format(indent=INDENT))

            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']

        # Normalize the metadata values
        d = {k: unicodedata.normalize("NFKD", text_type(v)).encode('ascii', 'ignore').decode()
             for k, v in iteritems(d)}

        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")

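# ---------------------------------------------------------------------------
# Usage sketch for the skeletonize() variant above.  Illustrative only: it
# assumes this module is conda-build's CRAN skeleton (so Config comes from
# conda_build.config), and 'jsonlite' is just an arbitrary example package.
def _example_skeletonize():
    from conda_build.config import Config
    # Write a recipe for r-jsonlite into ./cran-recipes; with recursive=True,
    # recipes for CRAN dependencies that are not already present are generated
    # too.  An already-existing recipe directory raises RuntimeError.
    skeletonize(['jsonlite'], output_dir='./cran-recipes', recursive=True,
                config=Config())
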
def main(args, parser):
    if len(args.packages) > 1 and args.version_compare:
        parser.error("--version-compare only works with one package at a time")
    if not args.update_outdated and not args.packages:
        parser.error("At least one package must be supplied")

    package_dicts = {}

    [output_dir] = args.output_dir

    cran_metadata = get_cran_metadata(args.cran_url, output_dir)

    if args.update_outdated:
        args.packages = get_outdated(output_dir, cran_metadata, args.packages)
        for pkg in args.packages:
            rm_rf(join(output_dir, 'r-' + pkg))

    while args.packages:
        package = args.packages.pop()

        is_github_url = 'github.com' in package
        url = package

        if is_github_url:
            rm_rf(source.WORK_DIR)
            source.git_source({'git_url': package}, '.')
            git_tag = args.git_tag[0] if args.git_tag else get_latest_git_tag()
            p = subprocess.Popen(['git', 'checkout', git_tag], stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, cwd=source.WORK_DIR)
            stdout, stderr = p.communicate()
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')
            if p.returncode:
                sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                         (git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            DESCRIPTION = join(source.WORK_DIR, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(source.WORK_DIR, 'pkg', "DESCRIPTION")
                sub_description_name = join(source.WORK_DIR, package.split('/')[-1],
                                            "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file)" % package)

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d

        if package.startswith('r-'):
            package = package[2:]
        if package.endswith('/'):
            package = package[:-1]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(args.cran_url, package, session))

        dir_path = join(output_dir, 'r-' + package.lower())
        if exists(dir_path) and not args.version_compare:
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(package, {
            'cran_packagename': package,
            'packagename': 'r-' + package.lower(),
            'build_depends': '',
            'run_depends': '',
            # CRAN doesn't seem to have this metadata :(
            'home_comment': '#',
            'homeurl': '',
            'summary_comment': '#',
            'summary': '',
        })

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = git_tag
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''

        if args.version:
            raise NotImplementedError("Package versions from CRAN are not yet implemented")
            [version] = args.version
            d['version'] = version

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if args.version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        if not is_github_url:
            d['filename'] = "{cran_packagename}_{cran_version}.tar.gz".format(**d)
            if args.archive:
                d['cranurl'] = (INDENT + args.cran_url + 'src/contrib/' + d['filename'] +
                                INDENT + args.cran_url + 'src/contrib/' +
                                'Archive/' + d['cran_packagename'] + '/' + d['filename'])
            else:
                d['cranurl'] = ' ' + args.cran_url + 'src/contrib/' + d['filename']

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use', None) == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [s.strip() for s in cran_package.get('Depends', '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports', '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", '').split(',') if s.strip()]

        dep_dict = {}
        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" % (package, s))
            name = match.group('name')
            archs = match.group('archs')
            relop = match.group('relop') or ''
            version = match.group('version') or ''
            version = version.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or version
            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))
            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=version)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        for dep_type in ['build', 'run']:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first
                    if d['cran_packagename'] in R_RECOMMENDED_PACKAGE_NAMES and dep_type == 'build':
                        # On Linux and OS X, r is a metapackage depending on
                        # r-base and r-recommended.  Recommended packages cannot
                        # build depend on r, as they would then build depend on
                        # themselves and the built package would end up being
                        # empty (because conda would find no new files).
                        r_name = 'r-base'
                    else:
                        r_name = 'r'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version.
                    deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()
                    # The r package on Windows includes the recommended packages
                    if name in R_RECOMMENDED_PACKAGE_NAMES:
                        end = ' # [not win]'
                    else:
                        end = ''
                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}{end}'.format(
                            name=conda_name, version=dep_dict[name], end=end, indent=INDENT))
                    else:
                        deps.append('{indent}{name}{end}'.format(name=conda_name,
                                                                 indent=INDENT, end=end))
                    if args.recursive:
                        if not exists(join(output_dir, conda_name)):
                            args.packages.append(name)

            if cran_package.get("NeedsCompilation", 'no') == 'yes':
                if dep_type == 'build':
                    deps.append('{indent}gcc     # [not win]'.format(indent=INDENT))
                else:
                    deps.append('{indent}libgcc  # [not win]'.format(indent=INDENT))
            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")

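# ---------------------------------------------------------------------------
# Wiring sketch for the main(args, parser) variant above.  Illustrative only:
# the option names mirror the attributes main() reads, and the nargs choices
# are inferred from how the values are unpacked (e.g. "[output_dir] =
# args.output_dir" implies a one-element list); the real conda skeleton CLI
# may differ in flag spelling and defaults.
def _example_parser():
    import argparse
    parser = argparse.ArgumentParser(
        description="Generate conda recipes from CRAN packages")
    parser.add_argument('packages', nargs='*')
    parser.add_argument('--output-dir', dest='output_dir', nargs=1, default=['.'])
    parser.add_argument('--cran-url', dest='cran_url',
                        default='https://cran.r-project.org/')
    parser.add_argument('--version', nargs=1)
    parser.add_argument('--git-tag', dest='git_tag', nargs=1)
    parser.add_argument('--no-archive', dest='archive', action='store_false')
    parser.add_argument('--recursive', action='store_true')
    parser.add_argument('--version-compare', dest='version_compare', action='store_true')
    parser.add_argument('--update-outdated', dest='update_outdated', action='store_true')
    return parser
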
def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=None,
                version=None, git_tag=None, cran_url="https://cran.r-project.org",
                recursive=False, archive=True, version_compare=False, update_policy='',
                r_interp='r-base', use_binaries_ver=None, use_noarch_generic=False,
                use_rtools_win=False, config=None):

    output_dir = realpath(output_dir)

    if not config:
        config = Config()

    if len(in_packages) > 1 and version_compare:
        raise ValueError("--version-compare only works with one package at a time")
    if update_policy == 'error' and not in_packages:
        raise ValueError("At least one package must be supplied")

    package_dicts = {}
    package_list = []

    cran_url = cran_url.rstrip('/')
    cran_metadata = get_cran_metadata(cran_url, output_dir)

    # r_recipes_in_output_dir = []
    # recipes = listdir(output_dir)
    # for recipe in recipes:
    #     if not recipe.startswith('r-') or not isdir(recipe):
    #         continue
    #     r_recipes_in_output_dir.append(recipe)

    for package in in_packages:
        inputs_dict = package_to_inputs_dict(output_dir, output_suffix, git_tag, package)
        if inputs_dict:
            package_dicts.update({inputs_dict['pkg-name']: {'inputs': inputs_dict}})

    for package_name, package_dict in package_dicts.items():
        package_list.append(package_name)

    while package_list:
        inputs = package_dicts[package_list.pop()]['inputs']
        location = inputs['location']
        pkg_name = inputs['pkg-name']
        is_github_url = location and 'github.com' in location
        is_tarfile = location and isfile(location) and tarfile.is_tarfile(location)
        url = inputs['location']

        dir_path = inputs['new-location']
        print("Making/refreshing recipe for {}".format(pkg_name))

        # Bodges GitHub packages and local tarballs into cran_metadata
        if is_github_url or is_tarfile:
            rm_rf(config.work_dir)
            if is_github_url:
                m = metadata.MetaData.fromdict({'source': {'git_url': location}}, config=config)
                source.git_source(m.get_section('source'), m.config.git_cache, m.config.work_dir)
                new_git_tag = git_tag if git_tag else get_latest_git_tag(config)
                p = subprocess.Popen(['git', 'checkout', new_git_tag], stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE, cwd=config.work_dir)
                stdout, stderr = p.communicate()
                stdout = stdout.decode('utf-8')
                stderr = stderr.decode('utf-8')
                if p.returncode:
                    sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                             (new_git_tag, stderr.strip()))
                if stdout:
                    print(stdout, file=sys.stdout)
                if stderr:
                    print(stderr, file=sys.stderr)
            else:
                m = metadata.MetaData.fromdict({'source': {'url': location}}, config=config)
                source.unpack(m.get_section('source'), m.config.work_dir, m.config.src_cache,
                              output_dir, m.config.work_dir)

            DESCRIPTION = join(config.work_dir, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(config.work_dir, 'pkg', "DESCRIPTION")
                sub_description_name = join(config.work_dir, location.split('/')[-1],
                                            "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file in %s, %s)" %
                             (location, sub_description_pkg, sub_description_name))

            with open(DESCRIPTION) as f:
                description_text = clear_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d
        else:
            package = pkg_name

        if pkg_name not in cran_metadata:
            sys.exit("Package %s not found" % pkg_name)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url and not is_tarfile:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(cran_url, package, session))

        cran_package = cran_metadata[package.lower()]

        package_dicts[package.lower()].update({
            'cran_packagename': package,
            'packagename': 'r-' + package.lower(),
            'patches': '',
            'build_number': 0,
            'build_depends': '',
            'host_depends': '',
            'run_depends': '',
            # CRAN doesn't seem to have this metadata :(
            'home_comment': '#',
            'homeurl': '',
            'summary_comment': '#',
            'summary': '',
        })
        d = package_dicts[package.lower()]
        d['binary1'] = ''
        d['binary2'] = ''

        if version:
            d['version'] = version
            raise NotImplementedError("Package versions from CRAN are not yet implemented")

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        patches = []
        script_env = []
        extra_recipe_maintainers = []
        build_number = 0
        if update_policy.startswith('merge') and inputs['old-metadata']:
            m = inputs['old-metadata']
            patches = make_array(m, 'source/patches')
            script_env = make_array(m, 'build/script_env')
            extra_recipe_maintainers = make_array(m, 'extra/recipe-maintainers', add_maintainer)
            if m.version() == d['conda_version']:
                build_number = int(m.get_value('build/number', 0))
                build_number += 1 if update_policy == 'merge-incr-build-num' else 0
        if add_maintainer:
            new_maintainer = "{indent}{add_maintainer}".format(indent=INDENT,
                                                               add_maintainer=add_maintainer)
            if new_maintainer not in extra_recipe_maintainers:
                if not len(extra_recipe_maintainers):
                    # We hit this case when there is no existing recipe.
                    extra_recipe_maintainers = make_array({}, 'extra/recipe-maintainers', True)
                extra_recipe_maintainers.append(new_maintainer)
        if len(extra_recipe_maintainers):
            extra_recipe_maintainers[1:].sort()
            extra_recipe_maintainers.insert(0, "extra:\n  ")
        d['extra_recipe_maintainers'] = ''.join(extra_recipe_maintainers)
        d['patches'] = ''.join(patches)
        d['script_env'] = ''.join(script_env)
        d['build_number'] = build_number

        cached_path = None
        cran_layout = {'source': {'selector': '{others}',
                                  'dir': 'src/contrib/',
                                  'ext': '.tar.gz',
                                  # If we had platform filters we would change this to:
                                  # build_for_linux or is_github_url or is_tarfile
                                  'use_this': True},
                       'win-64': {'selector': 'win64',
                                  'dir': 'bin/windows/contrib/{}/'.format(use_binaries_ver),
                                  'ext': '.zip',
                                  'use_this': True if use_binaries_ver else False},
                       'osx-64': {'selector': 'osx',
                                  'dir': 'bin/macosx/el-capitan/contrib/{}/'.format(
                                      use_binaries_ver),
                                  'ext': '.tgz',
                                  'use_this': True if use_binaries_ver else False}}

        available = {}
        for archive_type, archive_details in iteritems(cran_layout):
            contrib_url = ''
            if archive_details['use_this']:
                if is_tarfile:
                    filename = basename(location)
                    contrib_url = relpath(location, dir_path)
                    contrib_url_rendered = package_url = contrib_url
                    sha256 = hashlib.sha256()
                    cached_path = location
                elif not is_github_url:
                    filename = '{}_{}'.format(package, d['cran_version']) + archive_details['ext']
                    contrib_url = '{{{{ cran_mirror }}}}/{}'.format(archive_details['dir'])
                    contrib_url_rendered = cran_url + '/{}'.format(archive_details['dir'])
                    package_url = contrib_url_rendered + filename
                    sha256 = hashlib.sha256()
                    print("Downloading {} from {}".format(archive_type, package_url))
                    # We may need to inspect the file later to determine which
                    # compilers are needed.
                    cached_path, _ = source.download_to_cache(
                        config.src_cache, '',
                        dict({'url': package_url, 'fn': archive_type + '-' + filename}))
                available_details = {}
                available_details['selector'] = archive_details['selector']
                if cached_path:
                    sha256.update(open(cached_path, 'rb').read())
                    available_details['filename'] = filename
                    available_details['contrib_url'] = contrib_url
                    available_details['contrib_url_rendered'] = contrib_url_rendered
                    available_details['package_url'] = package_url
                    available_details['hash_entry'] = 'sha256: {}'.format(sha256.hexdigest())
                    available_details['cached_path'] = cached_path
                # This is rubbish; d[] should be renamed global[] and should be
                # merged into source and binaryN.
                if archive_type == 'source':
                    if is_github_url:
                        available_details['url_key'] = ''
                        available_details['fn_key'] = ''
                        available_details['git_url_key'] = 'git_url:'
                        available_details['git_tag_key'] = 'git_tag:'
                        hash_msg = '# You can add a hash for the file here, (md5, sha1 or sha256)'
                        available_details['hash_entry'] = hash_msg
                        available_details['filename'] = ''
                        available_details['cranurl'] = ''
                        available_details['git_url'] = url
                        available_details['git_tag'] = new_git_tag
                        available_details['archive_keys'] = ''
                    else:
                        available_details['url_key'] = 'url:'
                        available_details['fn_key'] = 'fn:'
                        available_details['git_url_key'] = ''
                        available_details['git_tag_key'] = ''
                        available_details['cranurl'] = ' ' + contrib_url + filename
                        available_details['git_url'] = ''
                        available_details['git_tag'] = ''
                available_details['patches'] = d['patches']
                available[archive_type] = available_details

        # Figure out the selectors according to what is available.
        _all = ['linux', 'win32', 'win64', 'osx']
        from_source = _all[:]
        binary_id = 1
        for archive_type, archive_details in iteritems(available):
            if archive_type != 'source':
                sel = archive_details['selector']
                from_source.remove(sel)
                binary_id += 1
            else:
                for k, v in iteritems(archive_details):
                    d[k] = v
        if from_source == _all:
            sel_src = ""
            sel_src_and_win = '  # [win]'
            sel_src_not_win = '  # [not win]'
        else:
            sel_src = '  # [' + ' or '.join(from_source) + ']'
            sel_src_and_win = '  # [' + ' or '.join(
                fs for fs in from_source if fs.startswith('win')) + ']'
            sel_src_not_win = '  # [' + ' or '.join(
                fs for fs in from_source if not fs.startswith('win')) + ']'

        d['sel_src'] = sel_src
        d['sel_src_and_win'] = sel_src_and_win
        d['sel_src_not_win'] = sel_src_not_win

        if 'source' in available:
            available_details = available['source']
            available_details['sel'] = sel_src
            filename = available_details['filename']
            if 'contrib_url' in available_details:
                contrib_url = available_details['contrib_url']
                if archive:
                    if is_tarfile:
                        available_details['cranurl'] = (INDENT + contrib_url)
                    else:
                        available_details['cranurl'] = (INDENT + contrib_url + filename +
                                                        sel_src + INDENT + contrib_url +
                                                        'Archive/{}/'.format(package) +
                                                        filename + sel_src)
                else:
                    available_details['cranurl'] = ' ' + contrib_url + filename + sel_src
            if not is_github_url:
                available_details['archive_keys'] = '{fn_key} {filename}{sel}\n' \
                                                    '  {url_key}{sel}' \
                                                    '    {cranurl}\n' \
                                                    '  {hash_entry}{sel}'.format(
                                                        **available_details)

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in cran_package['orig_lines'] if l])

        # Render the source and binaryN keys
        binary_id = 1
        for archive_type, archive_details in iteritems(available):
            if archive_type == 'source':
                d['source'] = SOURCE_META.format(**archive_details)
            else:
                archive_details['sel'] = '  # [' + archive_details['selector'] + ']'
                d['binary' + str(binary_id)] = BINARY_META.format(**archive_details)
                binary_id += 1

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        d['license_family'] = guess_license_family(d['license'], allowed_license_families)

        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use') == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])
        else:
            # Use the CRAN page (or the GitHub repo) as the homepage if nothing
            # has been specified
            d['home_comment'] = ''
            if is_github_url:
                d['homeurl'] = ' {}'.format(location)
            else:
                d['homeurl'] = ' https://CRAN.R-project.org/package={}'.format(package)

        if not use_noarch_generic or cran_package.get("NeedsCompilation", 'no') == 'yes':
            d['noarch_generic'] = ''
        else:
            d['noarch_generic'] = 'noarch: generic'

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [s.strip() for s in cran_package.get('Depends', '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports', '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", '').split(',') if s.strip()]

        dep_dict = {}

        seen = set()
        for s in list(chain(imports, depends, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" % (package, s))
            name = match.group('name')
            if name in seen:
                continue
            seen.add(name)
            archs = match.group('archs')
            relop = match.group('relop') or ''
            ver = match.group('version') or ''
            ver = ver.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or ver
            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))
            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=ver)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        need_git = is_github_url
        if cran_package.get("NeedsCompilation", 'no') == 'yes':
            with tarfile.open(available['source']['cached_path']) as tf:
                need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77')) for f in tf])
                # Fortran builds use CC to perform the link (they do not call the
                # linker directly).
                need_c = True if need_f else \
                    any([f.name.lower().endswith('.c') for f in tf])
                need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++'))
                                for f in tf])
                need_autotools = any([f.name.lower().endswith('/configure') for f in tf])
                need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \
                    any([f.name.lower().endswith(('/makefile', '/makevars')) for f in tf])
        else:
            need_c = need_cxx = need_f = need_autotools = need_make = False

        if 'Rcpp' in dep_dict or 'RcppArmadillo' in dep_dict:
            need_cxx = True

        if need_cxx:
            need_c = True

        for dep_type in ['build', 'host', 'run']:

            deps = []

            # Put non-R dependencies first.
            if dep_type == 'build':
                if need_c:
                    deps.append("{indent}{{{{ compiler('c') }}}} {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if need_cxx:
                    deps.append("{indent}{{{{ compiler('cxx') }}}} {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if need_f:
                    deps.append("{indent}{{{{ compiler('fortran') }}}} {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if use_rtools_win:
                    need_c = need_cxx = need_f = need_autotools = need_make = False
                    deps.append("{indent}{{{{native}}}}rtools {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{native}}}}extsoft {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}toolchain {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_autotools or need_make or need_git:
                    deps.append("{indent}{{{{posix}}}}filesystem {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_git:
                    deps.append("{indent}{{{{posix}}}}git".format(indent=INDENT))
                if need_autotools:
                    deps.append("{indent}{{{{posix}}}}sed {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}grep {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}autoconf {sel}".format(
                        indent=INDENT, sel=sel_src))
                    deps.append("{indent}{{{{posix}}}}automake-wrapper {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}automake {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}pkg-config".format(indent=INDENT))
                if need_make:
                    deps.append("{indent}{{{{posix}}}}make {sel}".format(
                        indent=INDENT, sel=sel_src))
            elif dep_type == 'run':
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}gcc-libs {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))

            if dep_type == 'host' or dep_type == 'run':
                for name in sorted(dep_dict):
                    if name in R_BASE_PACKAGE_NAMES:
                        continue
                    if name == 'R':
                        # Put R first.  Regardless of build or run, and whether
                        # this is a recommended package or not, it can only
                        # depend on r_interp, since anything else can and will
                        # cause cycles in the dependency graph.  The CRAN
                        # metadata lists all dependencies anyway, even those
                        # packages that are in the recommended group.
                        # We don't include any R version restrictions because
                        # conda-build always pins the r-base and mro-base version.
                        deps.insert(0, '{indent}{r_name}'.format(indent=INDENT,
                                                                 r_name=r_interp))
                    else:
                        conda_name = 'r-' + name.lower()
                        if dep_dict[name]:
                            deps.append('{indent}{name} {version}'.format(
                                name=conda_name, version=dep_dict[name], indent=INDENT))
                        else:
                            deps.append('{indent}{name}'.format(name=conda_name,
                                                                indent=INDENT))
                        if recursive:
                            lower_name = name.lower()
                            if lower_name not in package_dicts:
                                inputs_dict = package_to_inputs_dict(output_dir, output_suffix,
                                                                     git_tag, lower_name)
                                assert lower_name == inputs_dict['pkg-name'], \
                                    "name %s != inputs_dict['pkg-name'] %s" % (
                                        name, inputs_dict['pkg-name'])
                                assert lower_name not in package_list
                                package_dicts.update({lower_name: {'inputs': inputs_dict}})
                                package_list.append(lower_name)

            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        dir_path = d['inputs']['new-location']
        if exists(dir_path) and not version_compare:
            if update_policy == 'error':
                raise RuntimeError("directory already exists "
                                   "(and --update-policy is 'error'): %s" % dir_path)
            elif update_policy == 'overwrite':
                rm_rf(dir_path)
            elif update_policy == 'skip-up-to-date' and up_to_date(cran_metadata,
                                                                   d['inputs']['old-metadata']):
                continue
            elif update_policy == 'skip-existing' and d['inputs']['old-metadata']:
                continue

        # Normalize the metadata values
        d = {k: unicodedata.normalize("NFKD", text_type(v)).encode('ascii', 'ignore')
             .decode() for k, v in iteritems(d)}
        try:
            makedirs(join(dir_path))
        except:
            pass
        print("Writing recipe for %s" % package.lower())
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(clear_whitespace(CRAN_META.format(**d)))
        if not exists(join(dir_path, 'build.sh')) or update_policy == 'overwrite':
            with open(join(dir_path, 'build.sh'), 'w') as f:
                if from_source == _all:
                    f.write(CRAN_BUILD_SH_SOURCE.format(**d))
                elif from_source == []:
                    f.write(CRAN_BUILD_SH_BINARY.format(**d))
                else:
                    tpbt = [target_platform_bash_test_by_sel[t] for t in from_source]
                    d['source_pf_bash'] = ' || '.join(['[[ $target_platform ' + s + ' ]]'
                                                      for s in tpbt])
                    f.write(CRAN_BUILD_SH_MIXED.format(**d))
        if not exists(join(dir_path, 'bld.bat')) or update_policy == 'overwrite':
            with open(join(dir_path, 'bld.bat'), 'w') as f:
                if len([fs for fs in from_source if fs.startswith('win')]) == 2:
                    f.write(CRAN_BLD_BAT_SOURCE.format(**d))
                else:
                    f.write(CRAN_BLD_BAT_MIXED.format(**d))

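# ---------------------------------------------------------------------------
# Usage sketch for the update-policy-aware skeletonize() above.  Illustrative
# only: the policy strings come from the checks in the body ('error',
# 'overwrite', 'skip-up-to-date', 'skip-existing', and the merge policies, of
# which 'merge-incr-build-num' bumps build/number when the version is
# unchanged); the package and maintainer names are placeholders.
def _example_refresh_recipe():
    from conda_build.config import Config
    # Refresh an existing recipe in place, carrying over patches, script_env
    # and recipe-maintainers from the old meta.yaml, and incrementing the
    # build number if the CRAN version has not changed.
    skeletonize(['data.table'], output_dir='./recipes',
                update_policy='merge-incr-build-num',
                add_maintainer='some-github-user', config=Config())
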
def main(args, parser):
    package_dicts = {}

    [output_dir] = args.output_dir

    session = requests.Session()
    try:
        import cachecontrol
        import cachecontrol.caches
    except ImportError:
        print("Tip: install CacheControl to cache the CRAN metadata")
    else:
        session = cachecontrol.CacheControl(session,
                                            cache=cachecontrol.caches.FileCache(
                                                join(output_dir, '.web_cache')))

    print("Fetching metadata from %s" % args.cran_url)
    r = session.get(args.cran_url + "src/contrib/PACKAGES")
    r.raise_for_status()
    PACKAGES = r.text
    package_list = [remove_package_line_continuations(i.splitlines())
                    for i in PACKAGES.split('\n\n')]
    cran_metadata = {d['Package'].lower(): d for d in map(dict_from_cran_lines,
                                                          package_list)}

    while args.packages:
        package = args.packages.pop()

        is_github_url = 'github.com' in package
        url = package

        if is_github_url:
            rm_rf(source.WORK_DIR)
            source.git_source({'git_url': package}, '.')

            DESCRIPTION = join(source.WORK_DIR, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sys.exit("%s does not appear to be a valid R package "
                         "(no DESCRIPTION file)" % package)

            with open(DESCRIPTION) as f:
                description_text = f.read()

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d

        if package.startswith('r-'):
            package = package[2:]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            cran_metadata[package.lower()].update(
                get_package_metadata(args.cran_url, package, session))

        dir_path = join(output_dir, 'r-' + package.lower())
        if exists(dir_path):
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(package, {
            'cran_packagename': package,
            'packagename': 'r-' + package.lower(),
            'build_depends': '',
            'run_depends': '',
            # CRAN doesn't seem to have this metadata :(
            'home_comment': '#',
            'homeurl': '',
            'summary_comment': '#',
            'summary': '',
        })

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = get_latest_git_tag()
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''

        if args.version:
            raise NotImplementedError("Package versions from CRAN are not yet implemented")
            [version] = args.version
            d['version'] = version

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')

        if not is_github_url:
            d['filename'] = "{cran_packagename}_{cran_version}.tar.gz".format(**d)
            if args.archive:
                d['cranurl'] = (INDENT + args.cran_url + 'src/contrib/' + d['filename'] +
                                INDENT + args.cran_url + 'src/contrib/' +
                                'Archive/' + d['cran_packagename'] + '/' + d['filename'])
            else:
                d['cranurl'] = ' ' + args.cran_url + 'src/contrib/' + d['filename']

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use', None) == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [s.strip() for s in cran_package.get('Depends', '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports', '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", '').split(',') if s.strip()]

        dep_dict = {}
        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" % (package, s))
            name = match.group('name')
            archs = match.group('archs')
            relop = match.group('relop') or ''
            version = match.group('version') or ''
            version = version.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or version
            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))
            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=version)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        for dep_type in ['build', 'run']:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first
                    if d['cran_packagename'] in R_RECOMMENDED_PACKAGE_NAMES and dep_type == 'build':
                        # On Linux and OS X, r is a metapackage depending on
                        # r-base and r-recommended.  Recommended packages cannot
                        # build depend on r, as they would then build depend on
                        # themselves and the built package would end up being
                        # empty (because conda would find no new files).
                        r_name = 'r-base'
                    else:
                        r_name = 'r'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version.
                    deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()
                    # The r package on Windows includes the recommended packages
                    if name in R_RECOMMENDED_PACKAGE_NAMES:
                        end = ' # [not win]'
                    else:
                        end = ''
                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}{end}'.format(
                            name=conda_name, version=dep_dict[name], end=end, indent=INDENT))
                    else:
                        deps.append('{indent}{name}{end}'.format(name=conda_name,
                                                                 indent=INDENT, end=end))
                    if args.recursive:
                        if not exists(join(output_dir, conda_name)):
                            args.packages.append(name)

            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']
        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")

def main(args, parser):
    if len(args.packages) > 1 and args.version_compare:
        parser.error("--version-compare only works with one package at a time")
    if not args.update_outdated and not args.packages:
        parser.error("At least one package must be supplied")

    package_dicts = {}

    [output_dir] = args.output_dir

    cran_metadata = get_cran_metadata(args.cran_url, output_dir)

    if args.update_outdated:
        args.packages = get_outdated(output_dir, cran_metadata, args.packages)
        for pkg in args.packages:
            rm_rf(join(output_dir, 'r-' + pkg))

    while args.packages:
        package = args.packages.pop()

        is_github_url = 'github.com' in package
        url = package

        if is_github_url:
            rm_rf(source.WORK_DIR)
            source.git_source({'git_url': package}, '.')
            git_tag = args.git_tag[0] if args.git_tag else get_latest_git_tag()
            p = subprocess.Popen(['git', 'checkout', git_tag], stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, cwd=source.WORK_DIR)
            stdout, stderr = p.communicate()
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')
            if p.returncode:
                sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                         (git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            DESCRIPTION = join(source.WORK_DIR, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(source.WORK_DIR, 'pkg', "DESCRIPTION")
                sub_description_name = join(source.WORK_DIR, package.split('/')[-1],
                                            "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file)" % package)

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d

        if package.startswith('r-'):
            package = package[2:]
        if package.endswith('/'):
            package = package[:-1]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(args.cran_url, package, session))

        dir_path = join(output_dir, 'r-' + package.lower())
        if exists(dir_path) and not args.version_compare:
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(package, {
            'cran_packagename': package,
            'packagename': 'r-' + package.lower(),
            'build_depends': '',
            'run_depends': '',
            # CRAN doesn't seem to have this metadata :(
            'home_comment': '#',
            'homeurl': '',
            'summary_comment': '#',
            'summary': '',
        })

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = git_tag
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''

        if args.version:
            raise NotImplementedError("Package versions from CRAN are not yet implemented")
            [version] = args.version
            d['version'] = version

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if args.version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        if not is_github_url:
            d['filename'] = "{cran_packagename}_{cran_version}.tar.gz".format(**d)
            if args.archive:
                d['cranurl'] = (INDENT + args.cran_url + 'src/contrib/' + d['filename'] +
                                INDENT + args.cran_url + 'src/contrib/' +
                                'Archive/' + d['cran_packagename'] + '/' + d['filename'])
            else:
                d['cranurl'] = ' ' + args.cran_url + 'src/contrib/' + d['filename']

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use', None) == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [s.strip() for s in cran_package.get('Depends', '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports', '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", '').split(',') if s.strip()]

        dep_dict = {}
        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" % (package, s))
            name = match.group('name')
            archs = match.group('archs')
            relop = match.group('relop') or ''
            version = match.group('version') or ''
            version = version.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or version
            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))
            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=version)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        for dep_type in ['build', 'run']:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first
                    if d['cran_packagename'] in R_RECOMMENDED_PACKAGE_NAMES and dep_type == 'build':
                        # On Linux and OS X, r is a metapackage depending on
                        # r-base and r-recommended.  Recommended packages cannot
                        # build depend on r, as they would then build depend on
                        # themselves and the built package would end up being
                        # empty (because conda would find no new files).
                        r_name = 'r-base'
                    else:
                        r_name = 'r'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version.
                    deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()
                    # The r package on Windows includes the recommended packages
                    if name in R_RECOMMENDED_PACKAGE_NAMES:
                        end = ' # [not win]'
                    else:
                        end = ''
                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}{end}'.format(
                            name=conda_name, version=dep_dict[name], end=end, indent=INDENT))
                    else:
                        deps.append('{indent}{name}{end}'.format(name=conda_name,
                                                                 indent=INDENT, end=end))
                    if args.recursive:
                        if not exists(join(output_dir, conda_name)):
                            args.packages.append(name)

            if cran_package.get("NeedsCompilation", 'no') == 'yes':
                if dep_type == 'build':
                    deps.append('{indent}gcc     # [not win]'.format(indent=INDENT))
                else:
                    deps.append('{indent}libgcc  # [not win]'.format(indent=INDENT))
            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']

        # Normalize the metadata values (Python 2 vintage: iteritems() and
        # compat.text_type)
        d = {k: unicodedata.normalize("NFKD", compat.text_type(v)).encode('ascii', 'ignore')
             for k, v in d.iteritems()}

        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")

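# ---------------------------------------------------------------------------
# What the NFKD normalization in the variant above does -- a tiny,
# self-contained sketch.  'Müller' is an arbitrary example of the accented
# text that shows up in CRAN DESCRIPTION fields.
def _example_normalize():
    import unicodedata
    # NFKD decomposes 'ü' into 'u' plus a combining diaeresis; encoding to
    # ascii with errors='ignore' then drops the combining mark.
    assert unicodedata.normalize('NFKD', 'Müller').encode('ascii', 'ignore') == b'Muller'
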
def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=None,
                version=None, git_tag=None,
                cran_url="https://cran.r-project.org/", recursive=False,
                archive=True, version_compare=False, update_policy='',
                config=None):

    output_dir = realpath(output_dir)

    if not config:
        config = Config()

    if len(in_packages) > 1 and version_compare:
        raise ValueError("--version-compare only works with one package at a time")
    if update_policy == 'error' and not in_packages:
        raise ValueError("At least one package must be supplied")

    package_dicts = {}
    package_list = []

    cran_metadata = get_cran_metadata(cran_url, output_dir)

    # r_recipes_in_output_dir = []
    # recipes = listdir(output_dir)
    # for recipe in recipes:
    #     if not recipe.startswith('r-') or not isdir(recipe):
    #         continue
    #     r_recipes_in_output_dir.append(recipe)

    for package in in_packages:
        inputs_dict = package_to_inputs_dict(output_dir, output_suffix, git_tag, package)
        if inputs_dict:
            package_dicts.update({inputs_dict['pkg-name']: {'inputs': inputs_dict}})

    for package_name, package_dict in package_dicts.items():
        package_list.append(package_name)

    while package_list:
        inputs = package_dicts[package_list.pop()]['inputs']
        location = inputs['location']
        pkg_name = inputs['pkg-name']
        is_github_url = location and 'github.com' in location
        url = inputs['location']
        dir_path = inputs['new-location']
        print("Making/refreshing recipe for {}".format(pkg_name))

        # Bodges GitHub packages into cran_metadata
        if is_github_url:
            rm_rf(config.work_dir)
            m = metadata.MetaData.fromdict({'source': {'git_url': location}}, config=config)
            source.git_source(m.get_section('source'), m.config.git_cache, m.config.work_dir)
            new_git_tag = git_tag if git_tag else get_latest_git_tag(config)
            p = subprocess.Popen(['git', 'checkout', new_git_tag],
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                 cwd=config.work_dir)
            stdout, stderr = p.communicate()
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')
            if p.returncode:
                sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?"
                         % (new_git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            DESCRIPTION = join(config.work_dir, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(config.work_dir, 'pkg', "DESCRIPTION")
                sub_description_name = join(config.work_dir, location.split('/')[-1], "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file in %s, %s)"
                             % (location, sub_description_pkg, sub_description_name))

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d
        else:
            package = pkg_name

        if pkg_name not in cran_metadata:
            sys.exit("Package %s not found" % pkg_name)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(cran_url, package, session))

        cran_package = cran_metadata[package.lower()]

        package_dicts[package.lower()].update({
            'cran_packagename': package,
            'packagename': 'r-' + package.lower(),
            'patches': '',
            'build_number': 0,
            'build_depends': '',
            'run_depends': '',
            # CRAN doesn't seem to have this metadata :(
            'home_comment': '#',
            'homeurl': '',
            'summary_comment': '#',
            'summary': '',
        })
        d = package_dicts[package.lower()]

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['hash_entry'] = '# You can add a hash for the file here, like md5, sha1 or sha256'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = new_git_tag
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''
            d['hash_entry'] = ''

        if version:
            d['version'] = version
            raise NotImplementedError("Package versions from CRAN are not yet implemented")

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if version_compare:
            # NOTE: the boolean `version_compare` parameter shadows the
            # module-level version_compare() helper, so this call only works
            # if the helper is imported under another name.
            sys.exit(not version_compare(dir_path, d['conda_version']))

        patches = []
        script_env = []
        extra_recipe_maintainers = []
        build_number = 0
        if update_policy.startswith('merge') and inputs['old-metadata']:
            m = inputs['old-metadata']
            patches = make_array(m, 'source/patches')
            script_env = make_array(m, 'build/script_env')
            extra_recipe_maintainers = make_array(m, 'extra/recipe-maintainers', add_maintainer)
            if m.version() == d['conda_version']:
                build_number = int(m.get_value('build/number', 0))
                build_number += 1 if update_policy == 'merge-incr-build-num' else 0
        if not patches:
            patches.append("# patches:\n")
            patches.append("   # List any patch files here\n")
            patches.append("   # - fix.patch")
        if add_maintainer:
            new_maintainer = "{indent}{add_maintainer}".format(indent=INDENT,
                                                               add_maintainer=add_maintainer)
            if new_maintainer not in extra_recipe_maintainers:
                if not extra_recipe_maintainers:
                    # We hit this case when there is no existing recipe.
                    extra_recipe_maintainers = make_array({}, 'extra/recipe-maintainers', True)
                extra_recipe_maintainers.append(new_maintainer)
        if extra_recipe_maintainers:
            # Keep the first entry (the key line) in place, sort the rest.
            extra_recipe_maintainers[1:] = sorted(extra_recipe_maintainers[1:])
            extra_recipe_maintainers.insert(0, "extra:\n  ")
        d['extra_recipe_maintainers'] = ''.join(extra_recipe_maintainers)
        d['patches'] = ''.join(patches)
        d['script_env'] = ''.join(script_env)
        d['build_number'] = build_number

        cached_path = None
        if not is_github_url:
            filename = '{}_{}.tar.gz'
            contrib_url = cran_url + 'src/contrib/'
            package_url = contrib_url + filename.format(package, d['cran_version'])

            # calculate sha256 by downloading source
            sha256 = hashlib.sha256()
            print("Downloading source from {}".format(package_url))
            # We may need to inspect the file later to determine which
            # compilers are needed.
            cached_path, _ = source.download_to_cache(config.src_cache, '',
                                                      dict({'url': package_url}))
            sha256.update(open(cached_path, 'rb').read())
            d['hash_entry'] = 'sha256: {}'.format(sha256.hexdigest())

            d['filename'] = filename.format(package, '{{ version }}')
            if archive:
                d['cranurl'] = (INDENT + contrib_url + d['filename'] +
                                INDENT + contrib_url +
                                'Archive/{}/'.format(package) + d['filename'])
            else:
                d['cranurl'] = ' ' + cran_url + 'src/contrib/' + d['filename']

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in
                                        cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        d['license_family'] = guess_license_family(d['license'], allowed_license_families)

        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use') == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])
        else:
            # use CRAN page as homepage if nothing has been specified
            d['home_comment'] = ''
            if is_github_url:
                d['homeurl'] = ' {}'.format(location)
            else:
                d['homeurl'] = ' https://CRAN.R-project.org/package={}'.format(package)

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        # (In R, Depends are attached to the user's search path while Imports
        # are only loaded; both are treated the same here.)
        depends = [s.strip() for s in cran_package.get('Depends', '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports', '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", '').split(',') if s.strip()]

        dep_dict = {}

        seen = set()
        for s in list(chain(imports, depends, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" %
                         (package, s))
            name = match.group('name')
            if name in seen:
                continue
            seen.add(name)
            archs = match.group('archs')
            relop = match.group('relop') or ''
            ver = match.group('version') or ''
            ver = ver.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or ver
            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))

            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=ver)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        need_git = is_github_url
        if cran_package.get("NeedsCompilation", 'no') == 'yes':
            with tarfile.open(cached_path) as tf:
                need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77')) for f in tf])
                # Fortran builds use CC to perform the link (they do not call the linker directly).
                need_c = True if need_f else \
                    any([f.name.lower().endswith('.c') for f in tf])
                need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++'))
                                for f in tf])
                need_autotools = any([f.name.lower().endswith('/configure') for f in tf])
                need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \
                    any([f.name.lower().endswith(('/makefile', '/makevars')) for f in tf])
        else:
            need_c = need_cxx = need_f = need_autotools = need_make = False

        for dep_type in ['build', 'run']:
            deps = []
            # Put non-R dependencies first.
            if dep_type == 'build':
                if need_c:
                    deps.append("{indent}{{{{ compiler('c') }}}}  # [not win]".format(
                        indent=INDENT))
                if need_cxx:
                    deps.append("{indent}{{{{ compiler('cxx') }}}}  # [not win]".format(
                        indent=INDENT))
                if need_f:
                    deps.append("{indent}{{{{ compiler('fortran') }}}}  # [not win]".format(
                        indent=INDENT))
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}toolchain  # [win]".format(
                        indent=INDENT))
                if need_autotools or need_make or need_git:
                    deps.append("{indent}{{{{posix}}}}filesystem  # [win]".format(
                        indent=INDENT))
                if need_git:
                    deps.append("{indent}{{{{posix}}}}git".format(indent=INDENT))
                if need_autotools:
                    deps.append("{indent}{{{{posix}}}}sed  # [win]".format(indent=INDENT))
                    deps.append("{indent}{{{{posix}}}}grep  # [win]".format(indent=INDENT))
                    deps.append("{indent}{{{{posix}}}}autoconf".format(indent=INDENT))
                    deps.append("{indent}{{{{posix}}}}automake".format(indent=INDENT))
                    deps.append("{indent}{{{{posix}}}}pkg-config".format(indent=INDENT))
                if need_make:
                    deps.append("{indent}{{{{posix}}}}make".format(indent=INDENT))
            elif dep_type == 'run':
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}gcc-libs  # [win]".format(
                        indent=INDENT))

            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first.
                    # Regardless of build or run, and whether this is a recommended
                    # package or not, it can only depend on 'r-base' since anything
                    # else can and will cause cycles in the dependency graph. The
                    # cran metadata lists all dependencies anyway, even those
                    # packages that are in the recommended group.
                    r_name = 'r-base'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version.
                    deps.insert(0, '{indent}{r_name}'.format(indent=INDENT, r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()
                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}'.format(name=conda_name,
                                                                      version=dep_dict[name],
                                                                      indent=INDENT))
                    else:
                        deps.append('{indent}{name}'.format(name=conda_name, indent=INDENT))
                    if recursive:
                        lower_name = name.lower()
                        if lower_name not in package_dicts:
                            inputs_dict = package_to_inputs_dict(output_dir, output_suffix,
                                                                 git_tag, lower_name)
                            assert lower_name == inputs_dict['pkg-name'], \
                                "name %s != inputs_dict['pkg-name'] %s" % (
                                    name, inputs_dict['pkg-name'])
                            assert lower_name not in package_list
                            package_dicts.update({lower_name: {'inputs': inputs_dict}})
                            package_list.append(lower_name)

            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        dir_path = d['inputs']['new-location']
        if exists(dir_path) and not version_compare:
            if update_policy == 'error':
                raise RuntimeError("directory already exists "
                                   "(and --update-policy is 'error'): %s" % dir_path)
            elif update_policy == 'overwrite':
                rm_rf(dir_path)
            elif update_policy == 'skip-up-to-date' and up_to_date(cran_metadata,
                                                                   d['inputs']['old-metadata']):
                continue
            elif update_policy == 'skip-existing' and d['inputs']['old-metadata']:
                continue

        # Normalize the metadata values
        d = {k: unicodedata.normalize("NFKD", text_type(v)).encode('ascii', 'ignore').decode()
             for k, v in iteritems(d)}
        try:
            makedirs(join(dir_path))
        except OSError:
            # The directory may already exist from a previous run.
            pass
        print("Writing recipe for %s" % package.lower())
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        if not exists(join(dir_path, 'build.sh')) or update_policy == 'overwrite':
            with open(join(dir_path, 'build.sh'), 'w') as f:
                f.write(CRAN_BUILD_SH.format(**d))
        if not exists(join(dir_path, 'bld.bat')) or update_policy == 'overwrite':
            with open(join(dir_path, 'bld.bat'), 'w') as f:
                f.write(CRAN_BLD_BAT.format(**d))
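
# A short map of the update_policy values that the skeletonize() above
# dispatches on, summarized from its branches:
#
#   'error'                fail if the recipe directory already exists
#   'overwrite'            delete and regenerate an existing recipe
#   'skip-up-to-date'      keep an existing recipe whose version is current
#   'skip-existing'        keep any existing recipe untouched
#   'merge...'             carry patches, script_env and recipe-maintainers
#                          over from the old recipe; 'merge-incr-build-num'
#                          additionally bumps build/number when the conda
#                          version is unchanged
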
def main(args, parser):
    if len(args.packages) > 1 and args.version_compare:
        parser.error("--version-compare only works with one package at a time")
    if not args.update_outdated and not args.packages:
        parser.error("At least one package must be supplied")

    package_dicts = {}
    [output_dir] = args.output_dir

    cran_metadata = get_cran_metadata(args.cran_url, output_dir)

    if args.update_outdated:
        args.packages = get_outdated(output_dir, cran_metadata, args.packages)
        for pkg in args.packages:
            rm_rf(join(output_dir, "r-" + pkg))

    while args.packages:
        package = args.packages.pop()

        is_github_url = "github.com" in package
        url = package

        if is_github_url:
            rm_rf(source.WORK_DIR)
            source.git_source({"git_url": package}, ".")
            git_tag = args.git_tag[0] if args.git_tag else get_latest_git_tag()
            p = subprocess.Popen(
                ["git", "checkout", git_tag],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=source.WORK_DIR
            )
            stdout, stderr = p.communicate()
            stdout = stdout.decode("utf-8")
            stderr = stderr.decode("utf-8")
            if p.returncode:
                sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                         (git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            DESCRIPTION = join(source.WORK_DIR, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(source.WORK_DIR, "pkg", "DESCRIPTION")
                sub_description_name = join(source.WORK_DIR, package.split("/")[-1], "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file)" % package)

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(description_text.splitlines()))
            d["orig_description"] = description_text
            package = d["Package"].lower()
            cran_metadata[package] = d

        if package.startswith("r-"):
            package = package[2:]
        if package.endswith("/"):
            package = package[:-1]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]["Package"]

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(args.cran_url, package, session))

        dir_path = join(output_dir, "r-" + package.lower())
        if exists(dir_path) and not args.version_compare:
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(
            package,
            {
                "cran_packagename": package,
                "packagename": "r-" + package.lower(),
                "build_depends": "",
                "run_depends": "",
                # CRAN doesn't seem to have this metadata :(
                "home_comment": "#",
                "homeurl": "",
                "summary_comment": "#",
                "summary": "",
            },
        )

        if is_github_url:
            d["url_key"] = ""
            d["fn_key"] = ""
            d["git_url_key"] = "git_url:"
            d["git_tag_key"] = "git_tag:"
            d["filename"] = ""
            d["cranurl"] = ""
            d["git_url"] = url
            d["git_tag"] = git_tag
        else:
            d["url_key"] = "url:"
            d["fn_key"] = "fn:"
            d["git_url_key"] = ""
            d["git_tag_key"] = ""
            d["git_url"] = ""
            d["git_tag"] = ""

        if args.version:
            raise NotImplementedError("Package versions from CRAN are not yet implemented")
            [version] = args.version
            d["version"] = version

        d["cran_version"] = cran_package["Version"]
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d["conda_version"] = d["cran_version"].replace("-", "_")
        if args.version_compare:
            sys.exit(not version_compare(dir_path, d["conda_version"]))

        if not is_github_url:
            d["filename"] = "{cran_packagename}_{cran_version}.tar.gz".format(**d)
            if args.archive:
                d["cranurl"] = (
                    INDENT + args.cran_url + "src/contrib/" + d["filename"] +
                    INDENT + args.cran_url + "src/contrib/" +
                    "Archive/" + d["cran_packagename"] + "/" + d["filename"]
                )
            else:
                d["cranurl"] = " " + args.cran_url + "src/contrib/" + d["filename"]

        d["cran_metadata"] = "\n".join(["# %s" % l for l in cran_package["orig_lines"] if l])

        # XXX: We should maybe normalize these
        d["license"] = cran_package.get("License", "None")
        if "License_is_FOSS" in cran_package:
            d["license"] += " (FOSS)"
        if cran_package.get("License_restricts_use", None) == "yes":
            d["license"] += " (Restricts use)"

        if "URL" in cran_package:
            d["home_comment"] = ""
            d["homeurl"] = " " + yaml_quote_string(cran_package["URL"])

        if "Description" in cran_package:
            d["summary_comment"] = ""
            d["summary"] = " " + yaml_quote_string(cran_package["Description"])

        if "Suggests" in cran_package:
            d["suggests"] = "# Suggests: %s" % cran_package["Suggests"]
        else:
            d["suggests"] = ""

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        # (In R, Depends are attached to the user's search path while Imports
        # are only loaded; both are treated the same here.)
        depends = [s.strip() for s in cran_package.get("Depends", "").split(",") if s.strip()]
        imports = [s.strip() for s in cran_package.get("Imports", "").split(",") if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", "").split(",") if s.strip()]

        dep_dict = {}

        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" % (package, s))
            name = match.group("name")
            archs = match.group("archs")
            relop = match.group("relop") or ""
            version = match.group("version") or ""
            version = version.replace("-", "_")
            # If there is a relop there should be a version
            assert not relop or version
            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))

            dep_dict[name] = "{relop}{version}".format(relop=relop, version=version)

        if "R" not in dep_dict:
            dep_dict["R"] = ""

        for dep_type in ["build", "run"]:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == "R":
                    # Put R first
                    if d["cran_packagename"] in R_RECOMMENDED_PACKAGE_NAMES and dep_type == "build":
                        # On Linux and OS X, r is a metapackage depending on
                        # r-base and r-recommended. Recommended packages cannot
                        # build depend on r as they would then build depend on
                        # themselves and the built package would end up being
                        # empty (because conda would find no new files).
                        r_name = "r-base"
                    else:
                        r_name = "r"
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version.
                    deps.insert(0, "{indent}{r_name}".format(indent=INDENT, r_name=r_name))
                else:
                    conda_name = "r-" + name.lower()
                    # The r package on Windows includes the recommended packages
                    if name in R_RECOMMENDED_PACKAGE_NAMES:
                        end = "  # [not win]"
                    else:
                        end = ""
                    if dep_dict[name]:
                        deps.append(
                            "{indent}{name} {version}{end}".format(
                                name=conda_name, version=dep_dict[name], end=end, indent=INDENT
                            )
                        )
                    else:
                        deps.append("{indent}{name}{end}".format(name=conda_name, indent=INDENT, end=end))
                    if args.recursive:
                        if not exists(join(output_dir, conda_name)):
                            args.packages.append(name)

            if cran_package.get("NeedsCompilation", "no") == "yes":
                if dep_type == "build":
                    deps.append("{indent}gcc  # [not win]".format(indent=INDENT))
                else:
                    deps.append("{indent}libgcc  # [not win]".format(indent=INDENT))
            d["%s_depends" % dep_type] = "".join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d["packagename"]

        # Normalize the metadata values
        d = {
            k: unicodedata.normalize("NFKD", compat.text_type(v)).encode("ascii", "ignore").decode()
            for k, v in compat.iteritems(d)
        }

        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, "meta.yaml"), "w") as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, "build.sh"), "w") as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, "bld.bat"), "w") as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")
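
# A hedged sketch of how main(args, parser) might be wired up. The flag names
# below are assumptions modeled on the attributes the function reads
# (args.packages, args.output_dir as a one-element list, args.cran_url,
# args.git_tag, args.version, args.version_compare, args.update_outdated,
# args.archive, args.recursive); they are not a confirmed CLI:
#
#     import argparse
#
#     parser = argparse.ArgumentParser(description='CRAN recipe skeleton')
#     parser.add_argument('packages', nargs='*')
#     parser.add_argument('--output-dir', nargs=1, default=['.'])
#     parser.add_argument('--cran-url', default='https://cran.r-project.org/')
#     parser.add_argument('--git-tag', nargs=1)
#     parser.add_argument('--version', nargs=1)
#     parser.add_argument('--version-compare', action='store_true')
#     parser.add_argument('--update-outdated', action='store_true')
#     parser.add_argument('--archive', action='store_true', default=True)
#     parser.add_argument('--recursive', action='store_true')
#     main(parser.parse_args(), parser)
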
def skeletonize(in_packages, output_dir=".", output_suffix="", add_maintainer=None,
                version=None, git_tag=None, cran_url=None, recursive=False,
                archive=True, version_compare=False, update_policy='',
                r_interp='r-base', use_binaries_ver=None,
                use_noarch_generic=False, use_rtools_win=False, config=None,
                variant_config_files=None):

    output_dir = realpath(output_dir)
    config = get_or_merge_config(config, variant_config_files=variant_config_files)

    if not cran_url:
        with TemporaryDirectory() as t:
            _variant = get_package_variants(t, config)[0]
            cran_url = ensure_list(_variant.get('cran_mirror', DEFAULT_VARIANTS['cran_mirror']))[0]

    if len(in_packages) > 1 and version_compare:
        raise ValueError("--version-compare only works with one package at a time")
    if update_policy == 'error' and not in_packages:
        raise ValueError("At least one package must be supplied")

    package_dicts = {}
    package_list = []

    cran_url = cran_url.rstrip('/')
    cran_metadata = get_cran_metadata(cran_url, output_dir)

    # r_recipes_in_output_dir = []
    # recipes = listdir(output_dir)
    # for recipe in recipes:
    #     if not recipe.startswith('r-') or not isdir(recipe):
    #         continue
    #     r_recipes_in_output_dir.append(recipe)

    for package in in_packages:
        inputs_dict = package_to_inputs_dict(output_dir, output_suffix, git_tag, package)
        if inputs_dict:
            package_dicts.update({inputs_dict['pkg-name']: {'inputs': inputs_dict}})

    for package_name, package_dict in package_dicts.items():
        package_list.append(package_name)

    while package_list:
        inputs = package_dicts[package_list.pop()]['inputs']
        location = inputs['location']
        pkg_name = inputs['pkg-name']
        is_github_url = location and 'github.com' in location
        is_tarfile = location and isfile(location) and tarfile.is_tarfile(location)
        url = inputs['location']
        dir_path = inputs['new-location']
        print("Making/refreshing recipe for {}".format(pkg_name))

        # Bodges GitHub packages into cran_metadata
        if is_github_url or is_tarfile:
            rm_rf(config.work_dir)
            if is_github_url:
                m = metadata.MetaData.fromdict({'source': {'git_url': location}}, config=config)
                source.git_source(m.get_section('source'), m.config.git_cache, m.config.work_dir)
                new_git_tag = git_tag if git_tag else get_latest_git_tag(config)
                p = subprocess.Popen(['git', 'checkout', new_git_tag],
                                     stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                     cwd=config.work_dir)
                stdout, stderr = p.communicate()
                stdout = stdout.decode('utf-8')
                stderr = stderr.decode('utf-8')
                if p.returncode:
                    sys.exit("Error: 'git checkout %s' failed (%s).\nInvalid tag?"
                             % (new_git_tag, stderr.strip()))
                if stdout:
                    print(stdout, file=sys.stdout)
                if stderr:
                    print(stderr, file=sys.stderr)
            else:
                m = metadata.MetaData.fromdict({'source': {'url': location}}, config=config)
                source.unpack(m.get_section('source'), m.config.work_dir, m.config.src_cache,
                              output_dir, m.config.work_dir)

            DESCRIPTION = join(config.work_dir, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(config.work_dir, 'pkg', "DESCRIPTION")
                sub_description_name = join(config.work_dir, location.split('/')[-1], "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit("%s does not appear to be a valid R package "
                             "(no DESCRIPTION file in %s, %s)"
                             % (location, sub_description_pkg, sub_description_name))

            with open(DESCRIPTION) as f:
                description_text = clear_whitespace(f.read())

            d = dict_from_cran_lines(remove_package_line_continuations(
                description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d
        else:
            package = pkg_name

        if pkg_name not in cran_metadata:
            sys.exit("Package %s not found" % pkg_name)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url and not is_tarfile:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(cran_url, package, session))

        cran_package = cran_metadata[package.lower()]

        package_dicts[package.lower()].update({
            'cran_packagename': package,
            'packagename': 'r-' + package.lower(),
            'patches': '',
            'build_number': 0,
            'build_depends': '',
            'host_depends': '',
            'run_depends': '',
            # CRAN doesn't seem to have this metadata :(
            'home_comment': '#',
            'homeurl': '',
            'summary_comment': '#',
            'summary': '',
        })
        d = package_dicts[package.lower()]
        d['binary1'] = ''
        d['binary2'] = ''

        if version:
            d['version'] = version
            raise NotImplementedError("Package versions from CRAN are not yet implemented")

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot have -. Conda (verlib) will treat _ as a .
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if version_compare:
            # NOTE: the boolean `version_compare` parameter shadows the
            # module-level version_compare() helper, so this call only works
            # if the helper is imported under another name.
            sys.exit(not version_compare(dir_path, d['conda_version']))

        patches = []
        script_env = []
        extra_recipe_maintainers = []
        build_number = 0
        if update_policy.startswith('merge') and inputs['old-metadata']:
            m = inputs['old-metadata']
            patches = make_array(m, 'source/patches')
            script_env = make_array(m, 'build/script_env')
            extra_recipe_maintainers = make_array(m, 'extra/recipe-maintainers', add_maintainer)
            if m.version() == d['conda_version']:
                build_number = int(m.get_value('build/number', 0))
                build_number += 1 if update_policy == 'merge-incr-build-num' else 0
        if add_maintainer:
            new_maintainer = "{indent}{add_maintainer}".format(indent=INDENT,
                                                               add_maintainer=add_maintainer)
            if new_maintainer not in extra_recipe_maintainers:
                if not extra_recipe_maintainers:
                    # We hit this case when there is no existing recipe.
                    extra_recipe_maintainers = make_array({}, 'extra/recipe-maintainers', True)
                extra_recipe_maintainers.append(new_maintainer)
        if extra_recipe_maintainers:
            # Keep the first entry (the key line) in place, sort the rest.
            extra_recipe_maintainers[1:] = sorted(extra_recipe_maintainers[1:])
            extra_recipe_maintainers.insert(0, "extra:\n  ")
        d['extra_recipe_maintainers'] = ''.join(extra_recipe_maintainers)
        d['patches'] = ''.join(patches)
        d['script_env'] = ''.join(script_env)
        d['build_number'] = build_number

        cached_path = None
        cran_layout = {'source': {'selector': '{others}',
                                  'dir': 'src/contrib/',
                                  'ext': '.tar.gz',
                                  # If we had platform filters we would change this to:
                                  # build_for_linux or is_github_url or is_tarfile
                                  'use_this': True},
                       'win-64': {'selector': 'win64',
                                  'dir': 'bin/windows/contrib/{}/'.format(use_binaries_ver),
                                  'ext': '.zip',
                                  'use_this': True if use_binaries_ver else False},
                       'osx-64': {'selector': 'osx',
                                  'dir': 'bin/macosx/el-capitan/contrib/{}/'.format(
                                      use_binaries_ver),
                                  'ext': '.tgz',
                                  'use_this': True if use_binaries_ver else False}}

        available = {}
        for archive_type, archive_details in iteritems(cran_layout):
            contrib_url = ''
            if archive_details['use_this']:
                if is_tarfile:
                    filename = basename(location)
                    contrib_url = relpath(location, dir_path)
                    contrib_url_rendered = package_url = contrib_url
                    sha256 = hashlib.sha256()
                    cached_path = location
                elif not is_github_url:
                    filename_rendered = '{}_{}{}'.format(
                        package, d['cran_version'], archive_details['ext'])
                    filename = '{}_{{{{ version }}}}'.format(package) + archive_details['ext']
                    contrib_url = '{{{{ cran_mirror }}}}/{}'.format(archive_details['dir'])
                    contrib_url_rendered = cran_url + '/{}'.format(archive_details['dir'])
                    package_url = contrib_url_rendered + filename_rendered
                    sha256 = hashlib.sha256()
                    print("Downloading {} from {}".format(archive_type, package_url))
                    # We may need to inspect the file later to determine which
                    # compilers are needed.
                    cached_path, _ = source.download_to_cache(
                        config.src_cache, '',
                        {'url': package_url, 'fn': archive_type + '-' + filename_rendered})
                available_details = {}
                available_details['selector'] = archive_details['selector']
                if cached_path:
                    sha256.update(open(cached_path, 'rb').read())
                    available_details['filename'] = filename
                    available_details['contrib_url'] = contrib_url
                    available_details['contrib_url_rendered'] = contrib_url_rendered
                    available_details['cranurl'] = package_url
                    available_details['hash_entry'] = 'sha256: {}'.format(sha256.hexdigest())
                    available_details['cached_path'] = cached_path
                # This is rubbish; d[] should be renamed global[] and should be
                # merged into source and binaryN.
                if archive_type == 'source':
                    if is_github_url:
                        available_details['url_key'] = ''
                        available_details['fn_key'] = ''
                        available_details['git_url_key'] = 'git_url:'
                        available_details['git_tag_key'] = 'git_tag:'
                        hash_msg = '# You can add a hash for the file here (md5, sha1 or sha256)'
                        available_details['hash_entry'] = hash_msg
                        available_details['filename'] = ''
                        available_details['cranurl'] = ''
                        available_details['git_url'] = url
                        available_details['git_tag'] = new_git_tag
                        available_details['archive_keys'] = ''
                    else:
                        available_details['url_key'] = 'url:'
                        available_details['fn_key'] = 'fn:'
                        available_details['git_url_key'] = ''
                        available_details['git_tag_key'] = ''
                        available_details['cranurl'] = ' ' + contrib_url + filename
                        available_details['git_url'] = ''
                        available_details['git_tag'] = ''
                available_details['patches'] = d['patches']
                available[archive_type] = available_details

        # Figure out the selectors according to what is available.
        _all = ['linux', 'win32', 'win64', 'osx']
        from_source = _all[:]
        binary_id = 1
        for archive_type, archive_details in iteritems(available):
            if archive_type != 'source':
                sel = archive_details['selector']
                from_source.remove(sel)
                binary_id += 1
            else:
                for k, v in iteritems(archive_details):
                    d[k] = v
        if from_source == _all:
            sel_src = ""
            sel_src_and_win = '  # [win]'
            sel_src_not_win = '  # [not win]'
        else:
            sel_src = '  # [' + ' or '.join(from_source) + ']'
            sel_src_and_win = '  # [' + ' or '.join(
                fs for fs in from_source if fs.startswith('win')) + ']'
            sel_src_not_win = '  # [' + ' or '.join(
                fs for fs in from_source if not fs.startswith('win')) + ']'

        d['sel_src'] = sel_src
        d['sel_src_and_win'] = sel_src_and_win
        d['sel_src_not_win'] = sel_src_not_win

        if 'source' in available:
            available_details = available['source']
            available_details['sel'] = sel_src
            filename = available_details['filename']
            if 'contrib_url' in available_details:
                contrib_url = available_details['contrib_url']
                if archive:
                    if is_tarfile:
                        available_details['cranurl'] = (INDENT + contrib_url)
                    else:
                        available_details['cranurl'] = (INDENT + contrib_url +
                                                        filename + sel_src + INDENT + contrib_url +
                                                        'Archive/{}/'.format(package) + filename +
                                                        sel_src)
                else:
                    available_details['cranurl'] = ' ' + contrib_url + filename + sel_src
            if not is_github_url:
                available_details['archive_keys'] = '{fn_key} {filename} {sel}\n' \
                                                    '  {url_key}{sel}' \
                                                    '    {cranurl}\n' \
                                                    '  {hash_entry}{sel}'.format(
                                                        **available_details)

        d['cran_metadata'] = '\n'.join(['# %s' % l for l in
                                        cran_package['orig_lines'] if l])

        # Render the source and binaryN keys
        binary_id = 1
        for archive_type, archive_details in iteritems(available):
            if archive_type == 'source':
                d['source'] = SOURCE_META.format(**archive_details)
            else:
                archive_details['sel'] = '  # [' + archive_details['selector'] + ']'
                d['binary' + str(binary_id)] = BINARY_META.format(**archive_details)
                binary_id += 1

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        d['license_family'] = guess_license_family(d['license'], allowed_license_families)

        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use') == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])
        else:
            # use CRAN page as homepage if nothing has been specified
            d['home_comment'] = ''
            if is_github_url:
                d['homeurl'] = ' {}'.format(location)
            else:
                d['homeurl'] = ' https://CRAN.R-project.org/package={}'.format(package)

        if not use_noarch_generic or cran_package.get("NeedsCompilation", 'no') == 'yes':
            d['noarch_generic'] = ''
        else:
            d['noarch_generic'] = 'noarch: generic'

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        # (In R, Depends are attached to the user's search path while Imports
        # are only loaded; both are treated the same here.)
        depends = [s.strip() for s in cran_package.get('Depends', '').split(',') if s.strip()]
        imports = [s.strip() for s in cran_package.get('Imports', '').split(',') if s.strip()]
        links = [s.strip() for s in cran_package.get("LinkingTo", '').split(',') if s.strip()]

        dep_dict = {}

        seen = set()
        for s in list(chain(imports, depends, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" %
                         (package, s))
            name = match.group('name')
            if name in seen:
                continue
            seen.add(name)
            archs = match.group('archs')
            relop = match.group('relop') or ''
            ver = match.group('version') or ''
            ver = ver.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or ver
            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))

            dep_dict[name] = '{relop}{version}'.format(relop=relop, version=ver)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        need_git = is_github_url
        if cran_package.get("NeedsCompilation", 'no') == 'yes':
            with tarfile.open(available['source']['cached_path']) as tf:
                need_f = any([f.name.lower().endswith(('.f', '.f90', '.f77')) for f in tf])
                # Fortran builds use CC to perform the link (they do not call the linker directly).
                need_c = True if need_f else \
                    any([f.name.lower().endswith('.c') for f in tf])
                need_cxx = any([f.name.lower().endswith(('.cxx', '.cpp', '.cc', '.c++'))
                                for f in tf])
                need_autotools = any([f.name.lower().endswith('/configure') for f in tf])
                need_make = True if any((need_autotools, need_f, need_cxx, need_c)) else \
                    any([f.name.lower().endswith(('/makefile', '/makevars')) for f in tf])
        else:
            need_c = need_cxx = need_f = need_autotools = need_make = False

        if 'Rcpp' in dep_dict or 'RcppArmadillo' in dep_dict:
            need_cxx = True

        if need_cxx:
            need_c = True

        for dep_type in ['build', 'host', 'run']:
            deps = []
            # Put non-R dependencies first.
            if dep_type == 'build':
                if need_c:
                    deps.append("{indent}{{{{ compiler('c') }}}} {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if need_cxx:
                    deps.append("{indent}{{{{ compiler('cxx') }}}} {sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if need_f:
                    deps.append("{indent}{{{{ compiler('fortran') }}}}{sel}".format(
                        indent=INDENT, sel=sel_src_not_win))
                if use_rtools_win:
                    need_c = need_cxx = need_f = need_autotools = need_make = False
                    deps.append("{indent}{{{{native}}}}rtools {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{native}}}}extsoft {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}toolchain {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_autotools or need_make or need_git:
                    deps.append("{indent}{{{{posix}}}}filesystem {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                if need_git:
                    deps.append("{indent}{{{{posix}}}}git".format(indent=INDENT))
                if need_autotools:
                    deps.append("{indent}{{{{posix}}}}sed {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}grep {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}autoconf {sel}".format(
                        indent=INDENT, sel=sel_src))
                    deps.append("{indent}{{{{posix}}}}automake-wrapper{sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}automake {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))
                    deps.append("{indent}{{{{posix}}}}pkg-config".format(indent=INDENT))
                if need_make:
                    deps.append("{indent}{{{{posix}}}}make {sel}".format(
                        indent=INDENT, sel=sel_src))
            elif dep_type == 'run':
                if need_c or need_cxx or need_f:
                    deps.append("{indent}{{{{native}}}}gcc-libs {sel}".format(
                        indent=INDENT, sel=sel_src_and_win))

            if dep_type == 'host' or dep_type == 'run':
                for name in sorted(dep_dict):
                    if name in R_BASE_PACKAGE_NAMES:
                        continue
                    if name == 'R':
                        # Put R first.
                        # Regardless of build or run, and whether this is a
                        # recommended package or not, it can only depend on
                        # r_interp since anything else can and will cause
                        # cycles in the dependency graph. The cran metadata
                        # lists all dependencies anyway, even those packages
                        # that are in the recommended group.
                        # We don't include any R version restrictions because
                        # conda-build always pins the r-base and mro-base version.
                        deps.insert(0, '{indent}{r_name}'.format(indent=INDENT,
                                                                 r_name=r_interp))
                    else:
                        conda_name = 'r-' + name.lower()
                        if dep_dict[name]:
                            deps.append('{indent}{name} {version}'.format(name=conda_name,
                                                                          version=dep_dict[name],
                                                                          indent=INDENT))
                        else:
                            deps.append('{indent}{name}'.format(name=conda_name,
                                                                indent=INDENT))
                        if recursive:
                            lower_name = name.lower()
                            if lower_name not in package_dicts:
                                inputs_dict = package_to_inputs_dict(output_dir, output_suffix,
                                                                     git_tag, lower_name)
                                assert lower_name == inputs_dict['pkg-name'], \
                                    "name %s != inputs_dict['pkg-name'] %s" % (
                                        name, inputs_dict['pkg-name'])
                                assert lower_name not in package_list
                                package_dicts.update({lower_name: {'inputs': inputs_dict}})
                                package_list.append(lower_name)

            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        dir_path = d['inputs']['new-location']
        if exists(dir_path) and not version_compare:
            if update_policy == 'error':
                raise RuntimeError("directory already exists "
                                   "(and --update-policy is 'error'): %s" % dir_path)
            elif update_policy == 'overwrite':
                rm_rf(dir_path)
            elif update_policy == 'skip-up-to-date' and up_to_date(cran_metadata,
                                                                   d['inputs']['old-metadata']):
                continue
            elif update_policy == 'skip-existing' and d['inputs']['old-metadata']:
                continue

        # Normalize the metadata values
        d = {k: unicodedata.normalize("NFKD", text_type(v)).encode('ascii', 'ignore').decode()
             for k, v in iteritems(d)}
        try:
            makedirs(join(dir_path))
        except OSError:
            # The directory may already exist from a previous run.
            pass
        print("Writing recipe for %s" % package.lower())
        with open(join(dir_path, 'meta.yaml'), 'w') as f:
            f.write(clear_whitespace(CRAN_META.format(**d)))
        if not exists(join(dir_path, 'build.sh')) or update_policy == 'overwrite':
            with open(join(dir_path, 'build.sh'), 'w') as f:
                if from_source == _all:
                    f.write(CRAN_BUILD_SH_SOURCE.format(**d))
                elif from_source == []:
                    f.write(CRAN_BUILD_SH_BINARY.format(**d))
                else:
                    tpbt = [target_platform_bash_test_by_sel[t] for t in from_source]
                    d['source_pf_bash'] = ' || '.join(['[[ $target_platform ' + s + ' ]]'
                                                       for s in tpbt])
                    f.write(CRAN_BUILD_SH_MIXED.format(**d))
        if not exists(join(dir_path, 'bld.bat')) or update_policy == 'overwrite':
            with open(join(dir_path, 'bld.bat'), 'w') as f:
                if len([fs for fs in from_source if fs.startswith('win')]) == 2:
                    f.write(CRAN_BLD_BAT_SOURCE.format(**d))
                else:
                    f.write(CRAN_BLD_BAT_MIXED.format(**d))
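
# A hedged usage sketch for the skeletonize() variant above. The module name
# `cran_skeleton` and the package name 'glue' are illustrative assumptions;
# when cran_url is None the CRAN mirror comes from the variant config:
#
#     from cran_skeleton import skeletonize
#
#     skeletonize(['glue'], output_dir='recipes', recursive=True,
#                 update_policy='merge-incr-build-num',
#                 use_noarch_generic=True)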