def execute(args):
    """CLI entry point: parse arguments, build a Config, and refresh the index.

    The parsed namespace is expanded into a fresh ``Config``; ``--quiet`` on
    the command line is mapped onto ``config.verbose``.
    """
    _, args = parse_args(args)
    cfg = Config(**args.__dict__)
    # quiet and verbose are inverses of each other
    cfg.verbose = not args.quiet
    api.update_index(
        args.dir,
        config=cfg,
        force=args.force,
        check_md5=args.check_md5,
        remove=args.remove,
    )
def test_skip_existing_url(testing_metadata, testing_workdir, capfd):
    """A package already present in a channel URL must be skipped by the build."""
    # make sure that it is built
    built_pkgs = api.build(testing_metadata)

    # Copy our package into some new folder
    channel_dir = os.path.join(testing_workdir, 'someoutput')
    subdir_path = os.path.join(channel_dir, testing_metadata.config.host_subdir)
    os.makedirs(subdir_path)
    copy_into(built_pkgs[0], os.path.join(subdir_path, os.path.basename(built_pkgs[0])))

    # create the index so conda can find the file
    api.update_index(subdir_path, config=testing_metadata.config)

    # HACK: manually create noarch location there, so that conda 4.3.2+ considers a valid channel
    noarch_path = os.path.join(channel_dir, 'noarch')
    os.makedirs(noarch_path)
    api.update_index(noarch_path, config=testing_metadata.config)

    testing_metadata.config.skip_existing = True
    testing_metadata.config.channel_urls = [url_path(channel_dir)]
    api.build(testing_metadata)

    captured_out, captured_err = capfd.readouterr()
    assert "are already built" in captured_out
    assert url_path(testing_metadata.config.croot) in captured_out
def execute(args):
    """CLI entry point: parse arguments and forward them to ``api.update_index``.

    Pure keyword pass-through; every option comes straight off the parsed
    namespace.
    """
    _, args = parse_args(args)
    api.update_index(
        args.dir,
        check_md5=args.check_md5,
        channel_name=args.channel_name,
        threads=args.threads,
        subdir=args.subdir,
        patch_generator=args.patch_generator,
        verbose=args.verbose,
        progress=args.progress,
        hotfix_source_repo=args.hotfix_source_repo,
        current_index_versions=args.current_index_versions_file,
    )
def test_index_of_removed_pkg(testing_metadata):
    """Re-indexing after deleting built packages must leave repodata empty."""
    built = api.build(testing_metadata)
    for pkg_path in built:
        os.remove(pkg_path)
    api.update_index(testing_metadata.config.croot)
    repodata_path = os.path.join(testing_metadata.config.croot, subdir, 'repodata.json')
    with open(repodata_path) as fh:
        repodata = json.load(fh)
    assert not repodata['packages']
def test_run_exports_in_subpackage(testing_metadata):
    """run_exports declared on a subpackage output must pin downstream run deps."""
    exporter = testing_metadata.copy()
    exporter.meta['outputs'] = [{'name': 'has_run_exports', 'run_exports': 'bzip2 1.0'}]
    built = api.build(exporter)[0]
    api.update_index(os.path.dirname(built), config=testing_metadata.config)

    consumer = testing_metadata.copy()
    consumer.meta['requirements']['build'] = ['has_run_exports']
    # force a fresh index lookup during finalization
    consumer.config.index = None
    finalized = finalize_metadata(consumer)
    assert 'bzip2 1.0' in finalized.meta['requirements']['run']
def test_index_of_removed_pkg(testing_metadata):
    """After removing every built artifact, a fresh index has no packages."""
    for built_file in api.build(testing_metadata):
        os.remove(built_file)
    api.update_index(testing_metadata.config.croot)
    with open(
            os.path.join(testing_metadata.config.croot, subdir, 'repodata.json')) as fh:
        repodata = json.load(fh)
    assert not repodata['packages']
def test_pin_downstream_in_subpackage(testing_metadata, testing_index):
    """pin_downstream on a subpackage output must propagate into run requirements."""
    pinner = testing_metadata.copy()
    pinner.meta['outputs'] = [{'name': 'has_pin_downstream', 'pin_downstream': 'bzip2 1.0'}]
    built = api.build(pinner)[0]
    api.update_index(os.path.dirname(built), config=testing_metadata.config)

    consumer = testing_metadata.copy()
    consumer.meta['requirements']['build'] = ['has_pin_downstream']
    # force a fresh index lookup during finalization
    consumer.config.index = None
    finalized = finalize_metadata(consumer, None)
    assert 'bzip2 1.0' in finalized.meta['requirements']['run']
def test_patch_instructions_with_missing_subdir(testing_workdir):
    """Applying patch instructions must not fail for subdirs absent from the patches."""
    for missing_subdir in ('linux-64', 'zos-z'):
        os.makedirs(missing_subdir)
    api.update_index('.')
    # we use conda-forge's patch instructions because they don't have zos-z data, and that triggers an error
    pkg = "conda-forge-repodata-patches"
    url = "https://anaconda.org/conda-forge/{0}/20180828/download/noarch/{0}-20180828-0.tar.bz2".format(
        pkg)
    patch_instructions = download(url, os.path.join(os.getcwd(), "patches.tar.bz2"))
    api.update_index('.', patch_generator=patch_instructions)
def execute(args):
    """CLI entry point: parse arguments and forward them to ``api.update_index``.

    Every option is passed through by keyword from the parsed namespace.
    """
    _, args = parse_args(args)
    api.update_index(
        args.dir,
        check_md5=args.check_md5,
        channel_name=args.channel_name,
        threads=args.threads,
        subdir=args.subdir,
        patch_generator=args.patch_generator,
        verbose=args.verbose,
        progress=args.progress,
        hotfix_source_repo=args.hotfix_source_repo,
        convert_if_not_present=args.convert_if_not_present,
    )
def test_skip_existing_url(testing_workdir, test_config, capfd):
    """A package already served from a channel URL must be skipped on rebuild."""
    # make sure that it is built
    api.build(empty_sections, config=test_config)
    built_pkg = os.path.join(test_config.croot, test_config.subdir,
                             "empty_sections-0.0-0.tar.bz2")

    subdir_path = os.path.join(testing_workdir, test_config.subdir)
    copy_into(built_pkg, os.path.join(subdir_path, os.path.basename(built_pkg)))

    # create the index so conda can find the file
    api.update_index(subdir_path, config=test_config)

    api.build(os.path.join(metadata_dir, "empty_sections"),
              skip_existing=True,
              config=test_config,
              channel_urls=[url_path(testing_workdir)])

    captured_out, captured_err = capfd.readouterr()
    assert "is already built" in captured_out
    assert url_path(test_config.croot) in captured_out
def test_skip_existing_url(test_metadata, testing_workdir, capfd):
    """Building metadata whose output already exists in a channel URL is skipped."""
    # make sure that it is built
    built_pkg = api.get_output_file_path(test_metadata)
    api.build(test_metadata)

    # Copy our package into some new folder
    subdir_path = os.path.join(testing_workdir, test_metadata.config.subdir)
    copy_into(built_pkg, os.path.join(subdir_path, os.path.basename(built_pkg)))

    # create the index so conda can find the file
    api.update_index(subdir_path, config=test_metadata.config)

    test_metadata.config.skip_existing = True
    test_metadata.config.channel_urls = [url_path(testing_workdir)]
    api.build(test_metadata)

    captured_out, captured_err = capfd.readouterr()
    assert "is already built" in captured_out
    assert url_path(test_metadata.config.croot) in captured_out
def test_skip_existing_url(test_metadata, testing_workdir, capfd):
    """A previously built package found via channel_urls is not rebuilt."""
    # make sure that it is built
    built_pkgs = api.build(test_metadata)

    # Copy our package into some new folder
    channel_dir = os.path.join(testing_workdir, 'someoutput')
    subdir_path = os.path.join(channel_dir, test_metadata.config.subdir)
    os.makedirs(subdir_path)
    copy_into(built_pkgs[0], os.path.join(subdir_path, os.path.basename(built_pkgs[0])))

    # create the index so conda can find the file
    api.update_index(subdir_path, config=test_metadata.config)

    test_metadata.config.skip_existing = True
    test_metadata.config.channel_urls = [url_path(channel_dir)]
    api.build(test_metadata)

    captured_out, captured_err = capfd.readouterr()
    assert "is already built" in captured_out
    assert url_path(test_metadata.config.croot) in captured_out
def build(recipe_dir="./conda-recipe", channel=CHANNEL_DIR_DEFAULT):
    """Packages an IDS project into a conda package and then adds it to the
    main IDS conda channel.

    Args:
        recipe_dir: Path to the conda recipe directory to build.
        channel: Root directory of the conda channel to publish into; packages
            are copied into its ``linux-64`` subdirectory.

    Raises:
        FileNotFoundError: If ``recipe_dir`` does not exist.
        Exception: If the pre-build checks fail.
    """
    if not os.path.exists(recipe_dir):
        # BUG FIX: print() does not interpolate %-style placeholders passed as
        # separate arguments (that is a logging-module convention), so the old
        # call printed the literal "%s". Use an f-string instead.
        print(f"ERROR: Recipe DOES NOT exist at:\n {recipe_dir}")
        # Raise with the offending path so the traceback is self-explanatory.
        raise FileNotFoundError(recipe_dir)
    else:
        print(f"Releasing from:\n {recipe_dir}")

    if check(recipe_dir):
        output_files = get_output_file_paths(recipe_dir)
        print(f"Releasing the following packages:\n {output_files}")
    else:
        print("Error: Checks Failed")
        # Keep the exception type (callers may catch Exception) but attach a message.
        raise Exception(f"checks failed for recipe at {recipe_dir}")

    conda_build(recipe_dir)

    for file_path in output_files:
        if not os.path.exists(file_path):
            # BUG FIX: same print/%-interpolation defect as above.
            print(f"Error: Did not release {file_path}")
        else:
            # Channel layout is flat: every package lands in linux-64.
            shutil.copy(file_path, os.path.join(channel, "linux-64"))

    update_index(channel)
    print(f"Updated the conda channel at:\n {channel}")
def test_update_index(testing_workdir):
    """update_index must create repodata files under the noarch subdir."""
    api.update_index(testing_workdir)
    for expected in ("repodata.json", "repodata.json.bz2"):
        assert os.path.isfile(os.path.join(testing_workdir, 'noarch', expected))
def test_no_locking(testing_config):
    """Building with locking disabled must still succeed after an index update."""
    recipe_path = os.path.join(metadata_dir, 'source_git_jinja2')
    api.update_index(os.path.join(testing_config.croot, testing_config.subdir),
                     config=testing_config)
    api.build(recipe_path, config=testing_config, locking=False)
def test_channel():
    """Yield-fixture: index the bundled test channel, hand out a Channel for it,
    and remove the index again on teardown."""
    channel_dir = Path("./tests/data/automation/channel").absolute()
    update_index(channel_dir)
    yield Channel(channel_dir.as_uri())
    # teardown: drop the index files created above
    remove_index(channel_dir)
def test_update_index(testing_workdir, testing_config):
    """update_index must write the index and repodata files into the directory."""
    api.update_index(testing_workdir, testing_config)
    for expected in (".index.json", "repodata.json", "repodata.json.bz2"):
        assert os.path.isfile(os.path.join(testing_workdir, expected))