def test_filename(self):
    """The environment property loads via env.from_file with the given filename."""
    target = "filename_{}".format(random.randint(100, 200))
    with mock.patch.object(env, 'from_file') as from_file:
        spec = YamlFileSpec(filename=target)
        # Touch the property purely for its side effect of invoking the loader.
        spec.environment
        # Original passed selectors=None explicitly; same call signature here.
        from_file.assert_called_with(target, None)
def test_get_environment(self):
    """The environment property returns whatever env.from_file yields."""
    sentinel = random.randint(100, 200)
    with mock.patch.object(env, 'from_file', return_value=sentinel):
        spec = YamlFileSpec(name=None, filename='environment.yaml')
        self.assertEqual(spec.environment, sentinel)
def test_environment_file_exist(self):
    """can_handle() reports True when the environment file loads."""
    with mock.patch.object(env, 'from_file', return_value={}):
        yaml_spec = YamlFileSpec(name=None, filename='environment.yaml')
        self.assertTrue(yaml_spec.can_handle())
def test_no_environment_file(self):
    """can_handle() reports False when the file does not exist."""
    spec = YamlFileSpec(name=None, filename='not-a-file')
    # assertFalse is the idiomatic unittest check for a boolean predicate
    # (was assertEqual(..., False), which needlessly requires == False).
    self.assertFalse(spec.can_handle())
def pkg_env(environment_file: Path, coex_path: Path, cache_dir: Path) -> None: """Resolve, fetch, and repackage conda env into coex /pkgs directory. Resolve conda environment file to a specific package list via conda solver, then fetch and unpack target packages. Repack into .coex package data in cache_dir or reuse if pre-packed, and assemble into /pkgs under coex_path. Args: environment_file: Standard conda env file, can not contain pip deps. coex_path: Output coex build path. cache_dir: Coex build cache directory. """ # Resolve environment file to dependencies # Logic culled from conda-env spec = YamlFileSpec(filename=str(environment_file)) env = spec.environment logging.info(env.dependencies) assert set(env.dependencies) == { "conda" }, f"coex environments do not support pip dependencies: {env}" channel_urls = [chan for chan in env.channels if chan != "nodefaults"] if "nodefaults" not in env.channels: channel_urls.extend(context.channels) _channel_priority_map = prioritize_channels(channel_urls) # Setup an dummpy environment resolution for install into /dev/null # Execute fetch-and-extract operations for required conda packages prefix = "/dev/null" channels = IndexedSet(Channel(url) for url in _channel_priority_map) subdirs = IndexedSet( os.path.basename(url) for url in _channel_priority_map) solver = Solver(prefix, channels, subdirs, specs_to_add=env.dependencies["conda"]) transaction: UnlinkLinkTransaction = solver.solve_for_transaction() logging.info(transaction) transaction.download_and_extract() # Resolve all the, now extracted, target packages in the filesystem fetcher: ProgressiveFetchExtract = transaction._pfe target_records: Set[PackageRecord] = set(fetcher.link_precs) logging.debug("target_records=%s", target_records) extracted: Set[PackageCacheRecord] = { next( (pcrec for pcrec in chain(*(PackageCacheData(pkgs_dir).query(precord) for pkgs_dir in context.pkgs_dirs)) if pcrec.is_extracted), None, ) for precord in target_records } 
logging.debug("extracted=%s", extracted) # Repackage into a single-file .zst in the cache, then copy into the output # package. output_path = coex_path / "pkgs" for e in extracted: extracted_dir = Path(e.extracted_package_dir) pkgname = extracted_dir.name + ".tar.zst" cache_dir.mkdir(parents=True, exist_ok=True) if not (cache_dir / pkgname).exists(): pkg_cmd = ( # tar filtered through zstd # Seeing errors on macos 10.13 image when using --use-compress-program # with arguments, consider (a) installing conda-forge tar or (b) using # a wrapper script if zstd arguments are needed [ "tar", "--use-compress-program", "zstd -T0" if platform.system() != "Darwin" else "zstd", ] # write to archive file + ["-f", str(cache_dir / pkgname)] # chdir to extracted package directory + ["-C", str(extracted_dir)] # and add all package dirs + (["-c"] + [f.name for f in extracted_dir.iterdir()])) logging.info("packaging: %s", pkg_cmd) subprocess.check_call(pkg_cmd) output_path.mkdir(parents=True, exist_ok=True) shutil.copyfile(cache_dir / pkgname, output_path / pkgname)