def main():
    """Fetch the source for every recipe under --recipes-directory, storing it
    in the given cache directory (the conda build root).

    Side effects: creates the cache directory if needed, mutates
    ``conda_build.config`` and re-imports ``conda_build.source`` so that the
    cache location takes effect, then downloads each recipe's source.
    """
    parser = argparse.ArgumentParser(
        description='Fetch the source for all of the recipes in the given directory.')
    parser.add_argument('--recipes-directory',
                        help='The directory to look for recipes.',
                        default='recipes')
    parser.add_argument(
        'cache_directory',
        help='The directory to store the source cache (aka the conda build root).')
    args = parser.parse_args()

    source_cache = os.path.abspath(args.cache_directory)
    # os.path.join() with a single argument was a no-op; use the value directly.
    recipes_directory = args.recipes_directory
    if not os.path.exists(source_cache):
        os.makedirs(source_cache)

    # We import conda_build.config, set a value, remove conda_build.source from
    # sys.modules, and re-import it. We do this because conda_build has import
    # time resolution on some of the conda_build.source variables (e.g. SRC_CACHE).
    import conda_build.config
    conda_build.config.config.croot = source_cache
    sys.modules.pop('conda_build.source', None)
    import conda_build.source

    # NOTE: removed the unused `visited_sources = set()` accumulator — it was
    # never read anywhere in this function.
    for meta in list_metas(recipes_directory):
        fetch_to_source_cache(meta, source_cache)
def main():
    """Flatten several recipe directories into one output directory.

    When the same recipe appears in more than one input directory, only the
    copy chosen by ``flatten_metas`` (the pecking order) is kept; surviving
    recipes are copied into --output-dir, preserving their position relative
    to their source directory's parent.
    """
    parser = argparse.ArgumentParser(description='Removing duplicate recipes which are lower down the pecking order.')
    parser.add_argument('recipes_dirs', nargs="+",
                        help=("The directories containing recipes which should be 'flattened'."))
    parser.add_argument('--output-dir',
                        help='Directory which should be created containing flattened recipes.',
                        default='flattened_recipes')
    args = parser.parse_args()

    # Gather the metas for each input directory, keeping the CLI order.
    collections_by_dir = OrderedDict(
        (directory, list_metas(directory)) for directory in args.recipes_dirs)
    survivors = list(flatten_metas(collections_by_dir.values()))

    # Keep, per directory, only those metas that survived flattening.
    kept_by_dir = OrderedDict()
    for directory, dir_metas in collections_by_dir.items():
        keepers = [m for m in dir_metas if m in survivors]
        if keepers:
            kept_by_dir[directory] = keepers

    for directory, dir_metas in kept_by_dir.items():
        parent = os.path.dirname(os.path.abspath(directory))
        for m in dir_metas:
            # Figure out where the recipe is, relative to the recipe dir.
            relative = os.path.relpath(os.path.abspath(m.path), parent)
            destination = os.path.join(args.output_dir, relative)
            shutil.copytree(m.path, destination)
            print('Copying {}'.format(relative))
def test_follow_symlink(self):
    """Recipes reached only via symlinked directories are still discovered."""
    link_dir = self.tmp_dir(prefix='recipes_through_links')
    # Link two entries from the real recipes tree into a fresh directory.
    for entry in ('d1', 'm1'):
        os.symlink(os.path.join(self.recipes_root_dir, entry),
                   os.path.join(link_dir, entry))
    found = sorted(meta.name() for meta in list_metas(link_dir))
    self.assertEqual(found, ['m1', 'm3'])
def test_order_dependent_selector(self):
    """Sort order must ignore selectors entirely.

    If we listened to the selectors we would get a different build order.
    As a result of https://github.com/SciTools/conda-build-all/issues/30
    we know that we either have to resolve all dependencies up-front, or
    simply ignore all selectors when dealing with sort order (but
    emphatically not when building!).
    """
    from conda_build_all.builder import sort_dependency_order
    ordered = sort_dependency_order(list_metas(self.recipes_root_dir))
    self.assertEqual([m.name() for m in ordered], ['c', 'a', 'b'])
def main():
    """Fetch the source for every recipe under --recipes-directory, storing it
    in the given cache directory (the conda build root).

    Side effects: creates the cache directory if needed, mutates
    ``conda_build.config`` and re-imports ``conda_build.source`` so that the
    cache location takes effect, then downloads each recipe's source.
    """
    parser = argparse.ArgumentParser(
        description='Fetch the source for all of the recipes in the given directory.')
    parser.add_argument('--recipes-directory',
                        help='The directory to look for recipes.',
                        default='recipes')
    parser.add_argument(
        'cache_directory',
        help='The directory to store the source cache (aka the conda build root).')
    args = parser.parse_args()

    source_cache = os.path.abspath(args.cache_directory)
    # os.path.join() with a single argument was a no-op; use the value directly.
    recipes_directory = args.recipes_directory
    if not os.path.exists(source_cache):
        os.makedirs(source_cache)

    # We import conda_build.config, set a value, remove conda_build.source from
    # sys.modules, and re-import it. We do this because conda_build has import
    # time resolution on some of the conda_build.source variables (e.g. SRC_CACHE).
    import conda_build.config
    conda_build.config.config.croot = source_cache
    sys.modules.pop('conda_build.source', None)
    import conda_build.source

    # NOTE: removed the unused `visited_sources = set()` accumulator — it was
    # never read anywhere in this function.
    for meta in list_metas(recipes_directory):
        fetch_to_source_cache(meta, source_cache)
def test_default_depth(self):
    """With no max_depth given, recipes at every level are discovered."""
    found = sorted(meta.name() for meta in list_metas(self.recipes_root_dir))
    self.assertEqual(found, ['m1', 'm2', 'm3', 'm4'])
def test_depth_2(self):
    """max_depth=2 restricts discovery to recipes at most two levels deep."""
    found = sorted(meta.name()
                   for meta in list_metas(self.recipes_root_dir, max_depth=2))
    self.assertEqual(found, ['m1', 'm2'])