def main():
    missing = []
    unpackaged = []
    for pkg in json.loads(DEPENDENCIES.read_text("utf-8")):
        if pkg["name"] in IGNORE:
            continue
        matches = list(LIBRARY_LICENSES.glob(BASE_GLOB.format(crate=pkg["name"])))
        if not matches:
            missing.append(pkg)
        for match in matches:
            if "{}/{}".format(LIBRARY_LICENSES.name, match.name) not in LICENSE_FILES:
                unpackaged.append(pkg)

    if missing:
        print("\nLicenses for the following dependencies are missing:\n")
        print(ruamel_yaml.safe_dump(missing), flush=True)
    else:
        print("\nNo missing licenses.", flush=True)

    if unpackaged:
        print("\nLicenses for the following dependencies are not in license_file:\n")
        print(ruamel_yaml.safe_dump(unpackaged), flush=True)
    else:
        print("\nNo unpackaged licenses.", flush=True)

    return len(missing + unpackaged)
def lock(flow, pf, py, lab):
    output = P.ENVENTURES[flow, pf, py, lab]
    if not output.parent.exists():
        output.parent.mkdir(parents=True)
    composite = {"name": output.name, CHN: [], DEP: []}
    for env in P.ENV_DEPS[flow, pf, py, lab]:
        composite = merge(composite, safe_load(env.read_text()))
    print(safe_dump(composite, default_flow_style=False), flush=True)

    return_code = -1
    with tempfile.TemporaryDirectory() as td:
        tdp = Path(td)
        for mamba_arg in ["--mamba", "--no-mamba"]:
            env = tdp / "environment.yml"
            env.write_text(safe_dump(composite, default_flow_style=False))
            args = [P.CONDA_EXE, "lock", mamba_arg, "--platform", pf]
            # subprocess.call (not check_call) so that a --mamba failure
            # falls through to the --no-mamba attempt instead of raising
            return_code = subprocess.call(args, cwd=td)
            if return_code == 0:
                output.write_text(
                    "\n".join(
                        [
                            EXP,
                            (tdp / f"conda-{pf}.lock").read_text().split(EXP)[1].strip(),
                        ]
                    )
                )
                break
    return return_code
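# The merge helper used by lock() is defined elsewhere; this is a minimal,
# hypothetical sketch of its assumed behavior -- scalars from the later
# environment win, list values (channels, dependencies) are concatenated
# with duplicates dropped. The real implementation may differ.
def merge(left, right):
    out = dict(left)
    for key, value in (right or {}).items():
        if isinstance(value, list) and isinstance(out.get(key), list):
            merged = list(out[key])
            for item in value:
                if item not in merged:  # membership test works for unhashable items too
                    merged.append(item)
            out[key] = merged
        else:
            out[key] = value
    return out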
def dump(obj, dest=None, **kwargs):
    if dest is None:
        return yaml.safe_dump(obj, **kwargs)
    yaml.safe_dump(obj, dest, **kwargs)
    return None
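# Usage mirrors yaml.safe_dump itself (assuming `yaml` here is PyYAML):
# no destination returns the YAML text, a stream destination writes to it.
import sys

text = dump({"a": 1})       # returns "a: 1\n"
dump({"a": 1}, sys.stdout)  # writes "a: 1" to stdout, returns None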
def make_build_number(feedstock_root, recipe_root, config_file):
    """
    General logic

    The purpose of this is to ensure that the new compilers have build
    numbers > 1000 and legacy compilers have a build number < 1000.
    This is done by reading the build_number_decrement which is rendered
    into all the recipes.

    For linux and osx we want to avoid building for the legacy compilers
    with build numbers > 1000.

    Example matrix
    - {'compiler_c': 'toolchain_c', 'build_number_decrement': 1000}
    - {'compiler_c': 'gcc', 'build_number_decrement': 0}
    """
    with open(config_file) as f:
        specific_config = safe_load(f)
    build_number_dec = int(specific_config.get("build_number_decrement", [0])[0])
    use_legacy_compilers = False
    for key in {"c", "cxx", "fortran"}:
        if "toolchain_{}".format(key) in specific_config.get(
            "{}_compiler".format(key), ""
        ):
            use_legacy_compilers = True
            break

    import conda_build.api

    rendered_recipe = conda_build.api.render(
        recipe_path=recipe_root, variants=specific_config
    )
    build_numbers = set()
    for recipe, _, _ in rendered_recipe:
        build_numbers.add(recipe.get_value("build/number"))
    if len(build_numbers) > 1:
        raise ValueError("More than one build number found, giving up")

    # guard only the int() parse, so the validation error below is not swallowed
    try:
        build_number_int = int(build_numbers.pop())
    except ValueError:
        # A non-integer build number; we have this for things like the
        # blas mutex and a few other similar cases. Leave it untouched.
        return

    if build_number_int < 1000:
        if not use_legacy_compilers:
            raise ValueError(
                "Build numbers < 1000 are only valid with legacy compilers"
            )
        new_build_number = build_number_int
    else:
        new_build_number = build_number_int - build_number_dec

    config_dir, filename = os.path.split(config_file)
    with open(os.path.join(config_dir, "clobber_" + filename), "w") as fo:
        safe_dump({"build": {"number": new_build_number}}, fo)
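# Worked example of the decrement arithmetic above (hypothetical values):
# a rendered build/number of 1002 stays at 1002 for the gcc variant
# (build_number_decrement 0) and is clobbered to 2 for the toolchain_c
# variant (build_number_decrement 1000), so the clobber file would hold
# {"build": {"number": 2}}.
for decrement, expected in ((0, 1002), (1000, 2)):
    assert 1002 - decrement == expected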
def test_missing_license(crate):
    """Looks for magic-named files.

    Handles at least:
        library_licenses/<crate-name>-(UN)LICEN(S|C)E(|-MIT|-APACHE|-ZLIB)

    COPYING is not a license, but some of the manually-built files need it
    for clarification.
    """
    assert LIBRARY_LICENSES.exists()
    matches = [
        *LIBRARY_LICENSES.glob(f"{crate}-LICEN*"),
        *LIBRARY_LICENSES.glob(f"{crate}-UNLICEN*"),
    ]
    errors = []
    if not matches:
        errors += [f"no license files for {crate}"]
    for match in matches:
        if match.name not in META_LICENSE_NAMES:
            errors += [f"{crate}: {match.name} not in meta.yaml"]
    assert not errors, ruamel_yaml.safe_dump(
        DEPENDENCIES[crate], default_flow_style=False
    )
def lock(flow, pf, py, lab):
    output = P.ENVENTURES[flow, pf, py, lab]
    if not output.parent.exists():
        output.parent.mkdir(parents=True)
    composite = {"name": output.name, CHN: [], DEP: []}
    for env in P.ENV_DEPS[flow, pf, py, lab]:
        composite = merge(composite, safe_load(env.read_text()))
    print(safe_dump(composite, default_flow_style=False), flush=True)

    with tempfile.TemporaryDirectory() as td:
        tdp = Path(td)
        env = tdp / "environment.yml"
        env.write_text(safe_dump(composite, default_flow_style=False))
        args = [P.CONDA_EXE, "lock", "--platform", pf]
        subprocess.check_call(args, cwd=td)
        output.write_text((tdp / f"conda-{pf}.lock").read_text())

    return 0
try:  # ... to read json
    i = args.infile.read()
    if jinja2:
        # additional files can be used with {% include "file" %}
        dirs = [
            os.getcwd(),
            os.path.dirname(os.path.realpath(__file__)) + "/../top",
        ]
        loader = jinja2.FileSystemLoader(dirs)
        env = jinja2.Environment(loader=loader)
        i = env.from_string(i).render()  # render jinja2
        # i = jinja2.Template(i).render()  # render jinja2
    d = json.loads(i)
    if args.alwaysjson:
        # input was already JSON; echo it back (highlighted if available)
        if pygments:
            i = highlight(i, JsonLexer(), formatter())
        print(i)
    else:
        out = yaml.safe_dump(d, indent=args.indent, allow_unicode=True)
        if pygments:
            out = highlight(out, YamlLexer(), formatter())
        print(out)
except json.decoder.JSONDecodeError:
    try:  # ... to read yaml
        d = yaml.safe_load(i)  # plain load was deprecated in PyYAML
        out = json.dumps(d, indent=args.indent)
        if pygments:
            out = highlight(out, JsonLexer(), formatter())
        print(out)
    except yaml.parser.ParserError as exception:
        print("input error: invalid json or yaml format", file=sys.stderr)
        print(exception, file=sys.stderr)
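# The jinja2 pass above lets the input pull fragments in before JSON
# parsing. A hypothetical input file (assumes defaults.json exists in the
# cwd or in ../top relative to this script):
#
#     {
#         "defaults": {% include "defaults.json" %},
#         "name": "example"
#     }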
def yaml_safe_dump(obj):
    """Dump obj to a string."""
    return yaml.safe_dump(
        obj, block_seq_indent=2, default_flow_style=False, indent=2
    )
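# Caution: block_seq_indent is a ruamel.yaml option, not a PyYAML one, so
# this wrapper assumes `yaml` is ruamel.yaml's legacy API. A hypothetical
# call and its output:
#
#     yaml_safe_dump({"steps": ["build", "test"]})
#     # steps:
#     #   - build
#     #   - test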
args = parser.parse_args()

if pygments:
    if args.color:
        formatter = Terminal256Formatter
    else:
        formatter = NullFormatter

try:  # ... to read json
    i = args.infile.read()
    d = json.loads(i)
    if args.alwaysjson:
        # input was already JSON; echo it back (highlighted if available)
        if pygments:
            i = highlight(i, JsonLexer(), formatter())
        print(i)
    else:
        out = yaml.safe_dump(d, indent=args.indent, allow_unicode=True)
        if pygments:
            out = highlight(out, YamlLexer(), formatter())
        print(out)
except json.decoder.JSONDecodeError:
    try:  # ... to read yaml
        d = yaml.safe_load(i)  # plain yaml.load is deprecated without a Loader
        out = json.dumps(d, indent=args.indent)
        if pygments:
            out = highlight(out, JsonLexer(), formatter())
        print(out)
    except yaml.error.YAMLError:
        print("input error: invalid json or yaml format", file=sys.stderr)
def test_is_nonraw_object_directory(setup_teardown_folder):
    setup_teardown_folder[2].mkdir()

    result = exob.is_nonraw_object_directory(setup_teardown_folder[2])
    assert result is False

    compare_metafile = setup_teardown_folder[2] / exob.META_FILENAME
    with compare_metafile.open("w", encoding="utf-8") as f:
        pass  # empty metafile

    result = exob.is_nonraw_object_directory(setup_teardown_folder[2])
    assert result is False

    remove(setup_teardown_folder[1])
    with compare_metafile.open("w", encoding="utf-8") as meta_file:
        metadata = {
            exob.EXDIR_METANAME: {
                exob.VERSION_METANAME: 1,
            }
        }
        yaml.safe_dump(
            metadata, meta_file, default_flow_style=False, allow_unicode=True
        )

    result = exob.is_nonraw_object_directory(setup_teardown_folder[2])
    assert result is False

    remove(setup_teardown_folder[1])
    with compare_metafile.open("w", encoding="utf-8") as meta_file:
        metadata = {
            exob.EXDIR_METANAME: {
                exob.TYPE_METANAME: "wrong_typename",
                exob.VERSION_METANAME: 1,
            }
        }
        yaml.safe_dump(
            metadata, meta_file, default_flow_style=False, allow_unicode=True
        )

    result = exob.is_nonraw_object_directory(setup_teardown_folder[2])
    assert result is False

    remove(setup_teardown_folder[1])
    with compare_metafile.open("w", encoding="utf-8") as meta_file:
        metadata = {
            exob.EXDIR_METANAME: {
                exob.TYPE_METANAME: exob.DATASET_TYPENAME,
                exob.VERSION_METANAME: 1,
            }
        }
        yaml.safe_dump(
            metadata, meta_file, default_flow_style=False, allow_unicode=True
        )

    result = exob.is_nonraw_object_directory(setup_teardown_folder[2])
    assert result is True

    remove(setup_teardown_folder[2])
    exob._create_object_directory(
        pathlib.Path(setup_teardown_folder[2]),
        exob._default_metadata(exob.DATASET_TYPENAME),
    )
    result = exob.is_nonraw_object_directory(setup_teardown_folder[2])
    assert result is True
def dump_jekyll_fm(self):
    return (
        "---\n"
        + yaml.safe_dump(self.jekyll_fm, default_flow_style=False, allow_unicode=True)
        + "---\n"
    )
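# A hypothetical front matter dict and the resulting Jekyll output
# (yaml.safe_dump sorts keys alphabetically by default):
#
#     self.jekyll_fm = {"title": "Hello", "layout": "post"}
#     # ---
#     # layout: post
#     # title: Hello
#     # ---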