def main():
    """CLI entry point: build (or test) a recipe with mamba as the dependency solver."""
    _, args = parse_args(sys.argv[1:])
    args = args.__dict__
    config = Config(**args)
    # Channels from the user's rc files plus the explicitly configured channel URLs.
    channel_urls = get_rc_urls() + get_channel_urls({})
    init_api_context()
    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)
    # Only the first recipe argument is built/tested.
    recipe = args["recipe"][0]
    # NOTE(review): the solver is published through a module-level global —
    # presumably so patched conda-build internals can reach it; confirm.
    global solver
    solver = MambaSolver(channel_urls, context.subdir)
    solver.replace_channels()
    # NOTE(review): the combined variant spec is computed but never used here;
    # presumably invoked for its side effects / early validation — confirm.
    cbc, _ = conda_build.variants.get_package_combined_spec(recipe, config=config)
    if args["test"]:
        api.test(recipe, config=config)
    else:
        api.build(
            recipe,
            post=args["post"],
            build_only=args["build_only"],
            notest=args["notest"],
            config=config,
            variants=args["variants"],
        )
def main():
    """CLI entry point: parse arguments, refresh the build index, then build or test."""
    _, parsed = parse_args(sys.argv[1:])
    cli = parsed.__dict__
    config = Config(**cli)
    init_api_context()

    # Normalise the output folder and make sure it exists before indexing it.
    config.output_folder = os.path.abspath(config.output_folder)
    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)
    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe_path = cli["recipe"][0]
    if cli["test"]:
        api.test(recipe_path, config=config)
        return
    api.build(
        recipe_path,
        post=cli["post"],
        build_only=cli["build_only"],
        notest=cli["notest"],
        config=config,
        variants=cli["variants"],
    )
def test_package_test_without_recipe_in_package(testing_workdir, testing_metadata):
    """Can't test packages after building if recipe is not included. Not enough info to go on."""
    testing_metadata.config.include_recipe = False
    built = api.build(testing_metadata, notest=True)
    with pytest.raises(IOError):
        api.test(built[0], config=testing_metadata.config)
def test_recipe_test(testing_workdir, test_config):
    """Build a recipe dir, then run its test phase from the same dir."""
    # temporarily necessary because we have custom rebuilt svn for longer prefix here
    test_config.channel_urls = ('conda_build_test',)
    recipe_dir = os.path.join(metadata_dir, 'has_prefix_files')
    api.build(recipe_dir, config=test_config, notest=True)
    api.test(recipe_dir, config=test_config)
def test_recipe_builds(recipe, test_config, testing_workdir):
    """Build each parametrized recipe, then test it if the build succeeded."""
    # These variables exist solely so build scripts can check for them.
    os.environ["CONDA_TEST_VAR"] = "conda_test"
    os.environ["CONDA_TEST_VAR_2"] = "conda_test_2"
    built = api.build(recipe, config=test_config)
    if built:
        api.test(recipe, config=test_config)
def test_package_with_jinja2_does_not_redownload_source(testing_workdir, test_config):
    """jinja2 recipes normally trigger a source download at test time, but the
    build stage should already have provided the source.
    https://github.com/conda/conda-build/issues/1451"""
    recipe_dir = os.path.join(metadata_dir, 'jinja2_build_str')
    api.build(recipe_dir, config=test_config, notest=True)
    pkg_path = api.get_output_file_path(recipe_dir, config=test_config)
    api.test(pkg_path, config=test_config)
def test_package_test_without_recipe_in_package(testing_workdir, testing_metadata):
    """Can't test packages after building if recipe is not included. Not enough info to go on."""
    testing_metadata.config.include_recipe = False
    built = api.build(testing_metadata, notest=True, copy_test_source_files=True)
    api.test(built[0], config=testing_metadata.config)
def test_package_with_jinja2_does_not_redownload_source(testing_workdir, testing_config, mocker):
    """Testing a built jinja2 package must not fetch the source again.
    Source was already provided during the build stage.
    https://github.com/conda/conda-build/issues/1451"""
    recipe_dir = os.path.join(metadata_dir, 'jinja2_build_str')
    built = api.build(recipe_dir, config=testing_config, notest=True)
    provide = mocker.patch('conda_build.source.provide')
    api.test(built[0], config=testing_config)
    assert not provide.called
def test_package_with_jinja2_does_not_redownload_source(testing_workdir, test_config):
    """Source is downloaded during the build stage, so testing the built
    jinja2 package should not re-trigger a download.
    https://github.com/conda/conda-build/issues/1451"""
    recipe_dir = os.path.join(metadata_dir, 'jinja2_build_str')
    api.build(recipe_dir, config=test_config, notest=True)
    pkg_path = api.get_output_file_path(recipe_dir, config=test_config)
    api.test(pkg_path, config=test_config)
def test_package_with_jinja2_does_not_redownload_source(testing_workdir, testing_config, mocker):
    """Render + build first; the subsequent test phase must not call
    conda_build.source.provide again.
    https://github.com/conda/conda-build/issues/1451"""
    recipe_dir = os.path.join(metadata_dir, 'jinja2_build_str')
    meta = api.render(recipe_dir, config=testing_config, dirty=True)[0][0]
    built = api.build(meta, notest=True, anaconda_upload=False)
    provide = mocker.patch('conda_build.source.provide')
    api.test(built[0], config=meta.config)
    assert not provide.called
def test_api_extra_dep(testing_metadata):
    """An import test needing `click` passes only when extra_deps supplies it."""
    testing_metadata.meta['test']['imports'] = ['click']
    built = api.build(testing_metadata, notest=True, anaconda_upload=False)
    pkg = built[0]
    # extra_deps will add it in
    api.test(pkg, config=testing_metadata.config, extra_deps=['click'])
    # missing click dep will fail tests
    with pytest.raises(SystemExit):
        api.test(pkg, config=testing_metadata.config)
def test_package_test(testing_workdir, test_config):
    """Test calling conda build -t <package file> - rather than <recipe dir>"""
    # temporarily necessary because we have custom rebuilt svn for longer prefix here
    test_config.channel_urls = ('conda_build_test',)
    recipe_dir = os.path.join(metadata_dir, 'has_prefix_files')
    api.build(recipe_dir, config=test_config, notest=True)
    pkg_path = api.get_output_file_path(recipe_dir, config=test_config)
    api.test(pkg_path, config=test_config)
def call_conda_build(action, config, **kwargs):
    """Delegate package building/testing back to conda-build once the stage is
    set for boa's mambabuild to use the mamba solver.

    Args:
        action: "build" or "test"
        config: conda-build's Config
    Kwargs:
        additional keyword arguments are passed to conda-build
    Return:
        The result of conda-build's build: the built packages
    """
    recipe = config.recipe[0]
    # Guard clause: reject anything but the two supported actions up front.
    if action not in ("build", "test"):
        raise ValueError("action should be 'build' or 'test', got: %r" % action)
    if action == "test":
        return api.test(recipe, config=config, **kwargs)
    return api.build(
        recipe,
        post=config.post,
        build_only=config.build_only,
        notest=config.notest,
        config=config,
        variants=config.variants,
        **kwargs,
    )
def run(self):
    """Assemble conda metadata from this distribution, build, install, test,
    and optionally upload the resulting conda package."""
    # Make sure the metadata has the conda attributes, even if the
    # distclass isn't CondaDistribution. We primarily do this to simplify
    # the code below.
    metadata = self.distribution.metadata
    for attr in CondaDistribution.conda_attrs:
        if not hasattr(metadata, attr):
            setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

    # The command line takes precedence
    if self.buildnum is not None:
        metadata.conda_buildnum = self.buildnum

    # d accumulates a meta.yaml-shaped dict of dicts.
    d = defaultdict(dict)
    # PyPI allows uppercase letters but conda does not, so we fix the
    # name here.
    d['package']['name'] = metadata.name.lower()
    d['package']['version'] = metadata.version
    d['build']['number'] = metadata.conda_buildnum

    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = metadata.conda_buildstr

    d['build']['binary_relocation'] = metadata.conda_binary_relocation
    d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
    d['build']['features'] = metadata.conda_features
    d['build']['track_features'] = metadata.conda_track_features

    # XXX: I'm not really sure if it is correct to combine requires
    # and install_requires
    d['requirements']['run'] = d['requirements']['build'] = \
        [spec_from_line(i) for i in (metadata.requires or []) +
         (getattr(self.distribution, 'install_requires', []) or [])] + \
        ['python']

    if hasattr(self.distribution, 'tests_require'):
        # A lot of packages use extras_require['test'], but
        # tests_require is the one that is officially supported by
        # setuptools.
        d['test']['requires'] = [spec_from_line(i) for i in
                                 self.distribution.tests_require or []]

    d['about']['home'] = metadata.url
    # Don't worry about classifiers. This isn't skeleton pypi. We
    # don't need to make this work with random stuff in the wild. If
    # someone writes their setup.py wrong and this doesn't work, it's
    # their fault.
    d['about']['license'] = metadata.license
    d['about']['summary'] = metadata.description

    # This is similar logic from conda skeleton pypi
    entry_points = getattr(self.distribution, 'entry_points', [])
    if entry_points:
        if isinstance(entry_points, string_types):
            # makes sure it is left-shifted
            newstr = "\n".join(x.strip() for x in entry_points.splitlines())
            c = configparser.ConfigParser()
            entry_points = {}
            try:
                # NOTE(review): readfp is deprecated (removed in Python 3.12);
                # kept as-is because the PY3 branch below shows this code still
                # supports Python 2, where read_file is unavailable.
                c.readfp(StringIO(newstr))
            except Exception as err:
                # This seems to be the best error here
                raise DistutilsGetoptError("ERROR: entry-points not understood: " +
                                           str(err) + "\nThe string was" + newstr)
            else:
                for section in c.sections():
                    if section in ['console_scripts', 'gui_scripts']:
                        value = ['%s=%s' % (option, c.get(section, option))
                                 for option in c.options(section)]
                        entry_points[section] = value
                    else:
                        # Make sure setuptools is added as a dependency below
                        entry_points[section] = None

        if not isinstance(entry_points, dict):
            raise DistutilsGetoptError("ERROR: Could not add entry points. They were:\n" +
                                       entry_points)
        else:
            rs = entry_points.get('scripts', [])
            cs = entry_points.get('console_scripts', [])
            gs = entry_points.get('gui_scripts', [])
            # We have *other* kinds of entry-points so we need
            # setuptools at run-time
            if not rs and not cs and not gs and len(entry_points) > 1:
                d['requirements']['run'].append('setuptools')
                d['requirements']['build'].append('setuptools')
            entry_list = rs + cs + gs
            if gs and self.config.platform == 'osx':
                d['build']['osx_is_app'] = True
            if len(cs + gs) != 0:
                d['build']['entry_points'] = entry_list
                if metadata.conda_command_tests is True:
                    d['test']['commands'] = list(map(unicode,
                                                     pypi.make_entry_tests(entry_list)))

    if 'setuptools' in d['requirements']['run']:
        d['build']['preserve_egg_dir'] = True

    if metadata.conda_import_tests:
        if metadata.conda_import_tests is True:
            d['test']['imports'] = ((self.distribution.packages or []) +
                                    (self.distribution.py_modules or []))
        else:
            d['test']['imports'] = metadata.conda_import_tests

    if (metadata.conda_command_tests and not
            isinstance(metadata.conda_command_tests, bool)):
        d['test']['commands'] = list(map(unicode, metadata.conda_command_tests))

    d = dict(d)
    m = MetaData.fromdict(d, config=self.config)
    # Shouldn't fail, but do you really trust the code above?
    m.check_fields()
    m.config.set_build_id = False
    m.config.keep_old_work = True
    api.build(m, build_only=True)
    # prevent changes in the build ID from here, so that we're working in the same prefix
    # Do the install
    if not PY3:
        # Command is an old-style class in Python 2
        install.run(self)
    else:
        super().run()
    m.config.keep_old_work = False
    api.build(m, post=True)
    api.test(m)
    output_file = api.get_output_file_path(m)
    if self.anaconda_upload:
        class args:
            anaconda_upload = self.anaconda_upload
        handle_anaconda_upload(output_file, args)
    else:
        no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
""" % output_file
        print(no_upload_message)
def test_package_test(testing_workdir, testing_config):
    """Test calling conda build -t <package file> - rather than <recipe dir>"""
    recipe_dir = os.path.join(metadata_dir, 'has_prefix_files')
    meta = api.render(recipe_dir, config=testing_config)[0][0]
    built = api.build(meta, notest=True, anaconda_upload=False)
    api.test(built[0], config=meta.config)
def test_recipe_test(testing_workdir, testing_config):
    """Test calling conda build -t <recipe dir>"""
    recipe_dir = os.path.join(metadata_dir, 'has_prefix_files')
    meta = api.render(recipe_dir, config=testing_config)[0][0]
    api.build(meta, notest=True, anaconda_upload=False)
    api.test(recipe_dir, config=meta.config)
def test_action(recipe, config):
    """Run conda-build's test phase on *recipe*, leaving broken packages in place."""
    outcome = api.test(recipe, move_broken=False, config=config)
    return outcome
def run(self):
    """Assemble conda metadata from this distribution, build, install, test,
    and optionally upload the resulting conda package."""
    # Make sure the metadata has the conda attributes, even if the
    # distclass isn't CondaDistribution. We primarily do this to simplify
    # the code below.
    metadata = self.distribution.metadata
    for attr in CondaDistribution.conda_attrs:
        if not hasattr(metadata, attr):
            setattr(metadata, attr, CondaDistribution.conda_attrs[attr])

    # The command line takes precedence
    if self.buildnum is not None:
        metadata.conda_buildnum = self.buildnum

    # d accumulates a meta.yaml-shaped dict of dicts.
    d = defaultdict(dict)
    # PyPI allows uppercase letters but conda does not, so we fix the
    # name here.
    d['package']['name'] = metadata.name.lower()
    d['package']['version'] = metadata.version
    d['build']['number'] = metadata.conda_buildnum

    # MetaData does the auto stuff if the build string is None
    d['build']['string'] = metadata.conda_buildstr

    d['build']['binary_relocation'] = metadata.conda_binary_relocation
    d['build']['preserve_egg_dir'] = metadata.conda_preserve_egg_dir
    d['build']['features'] = metadata.conda_features
    d['build']['track_features'] = metadata.conda_track_features

    # XXX: I'm not really sure if it is correct to combine requires
    # and install_requires
    d['requirements']['run'] = d['requirements']['build'] = \
        [spec_from_line(i) for i in (metadata.requires or []) +
         (getattr(self.distribution, 'install_requires', []) or [])] + \
        ['python']

    if hasattr(self.distribution, 'tests_require'):
        # A lot of packages use extras_require['test'], but
        # tests_require is the one that is officially supported by
        # setuptools.
        d['test']['requires'] = [
            spec_from_line(i) for i in self.distribution.tests_require or []
        ]

    d['about']['home'] = metadata.url
    # Don't worry about classifiers. This isn't skeleton pypi. We
    # don't need to make this work with random stuff in the wild. If
    # someone writes their setup.py wrong and this doesn't work, it's
    # their fault.
    d['about']['license'] = metadata.license
    d['about']['summary'] = metadata.description

    # This is similar logic from conda skeleton pypi
    entry_points = getattr(self.distribution, 'entry_points', [])
    if entry_points:
        if isinstance(entry_points, string_types):
            # makes sure it is left-shifted
            newstr = "\n".join(x.strip() for x in entry_points.splitlines())
            c = configparser.ConfigParser()
            entry_points = {}
            try:
                c.read_file(StringIO(newstr))
            except Exception as err:
                # This seems to be the best error here
                raise DistutilsGetoptError(
                    "ERROR: entry-points not understood: " + str(err) +
                    "\nThe string was" + newstr)
            else:
                for section in c.sections():
                    if section in ['console_scripts', 'gui_scripts']:
                        value = [
                            f'{option}={c.get(section, option)}'
                            for option in c.options(section)
                        ]
                        entry_points[section] = value
                    else:
                        # Make sure setuptools is added as a dependency below
                        entry_points[section] = None

        if not isinstance(entry_points, dict):
            raise DistutilsGetoptError(
                "ERROR: Could not add entry points. They were:\n" +
                entry_points)
        else:
            rs = entry_points.get('scripts', [])
            cs = entry_points.get('console_scripts', [])
            gs = entry_points.get('gui_scripts', [])
            # We have *other* kinds of entry-points so we need
            # setuptools at run-time
            if not rs and not cs and not gs and len(entry_points) > 1:
                d['requirements']['run'].append('setuptools')
                d['requirements']['build'].append('setuptools')
            entry_list = rs + cs + gs
            if gs and self.config.platform == 'osx':
                d['build']['osx_is_app'] = True
            if len(cs + gs) != 0:
                d['build']['entry_points'] = entry_list
                if metadata.conda_command_tests is True:
                    d['test']['commands'] = list(
                        map(unicode, pypi.make_entry_tests(entry_list)))

    if 'setuptools' in d['requirements']['run']:
        d['build']['preserve_egg_dir'] = True

    if metadata.conda_import_tests:
        if metadata.conda_import_tests is True:
            d['test']['imports'] = ((self.distribution.packages or []) +
                                    (self.distribution.py_modules or []))
        else:
            d['test']['imports'] = metadata.conda_import_tests

    if (metadata.conda_command_tests and not
            isinstance(metadata.conda_command_tests, bool)):
        d['test']['commands'] = list(
            map(unicode, metadata.conda_command_tests))

    d = dict(d)
    self.config.keep_old_work = True
    m = MetaData.fromdict(d, config=self.config)
    # Shouldn't fail, but do you really trust the code above?
    m.check_fields()
    m.config.set_build_id = False
    # Pin the python variant to the interpreter running setup.py.
    m.config.variant['python'] = ".".join(
        (str(sys.version_info.major), str(sys.version_info.minor)))
    api.build(m, build_only=True, notest=True)
    self.config = m.config
    # prevent changes in the build ID from here, so that we're working in the same prefix
    # Do the install
    if not PY3:
        # Command is an old-style class in Python 2
        install.run(self)
    else:
        super().run()
    output = api.build(m, post=True, notest=True)[0]
    api.test(output, config=m.config)
    m.config.clean()
    if self.anaconda_upload:
        class args:
            anaconda_upload = self.anaconda_upload
        handle_anaconda_upload(output, args)
    else:
        no_upload_message = """\
# If you want to upload this package to anaconda.org later, type:
#
# $ anaconda upload %s
""" % output
        print(no_upload_message)
def test_package_test(testing_workdir, testing_config):
    """Test calling conda build -t <package file> - rather than <recipe dir>"""
    recipe_dir = os.path.join(metadata_dir, 'has_prefix_files')
    built = api.build(recipe_dir, config=testing_config, notest=True)
    api.test(built[0], config=testing_config)
def test_test_output_folder_moves_file(test_metadata, testing_workdir):
    """Testing with ``output_folder`` set should move the tested package there.

    Builds the package (skipping the built-in test phase), runs ``api.test``
    with an explicit ``output_folder``, then asserts the package was moved —
    not copied — from its original location into ``testing_workdir``.
    """
    output_path = api.get_output_file_path(test_metadata)
    # Fix: conda-build's api.build keyword is `notest`, not `no_test` —
    # every other call in this file uses `notest`; the misspelled keyword
    # would be ignored (or raise TypeError), so tests were not being skipped.
    api.build(test_metadata, notest=True)
    api.test(output_path, output_folder=testing_workdir)
    # The package must have been moved out of its original location...
    assert not os.path.exists(output_path)
    # ...and into the requested output folder.
    assert os.path.isfile(os.path.join(testing_workdir, os.path.basename(output_path)))
def test_action(metadata, config):
    """Run conda-build's test phase for *metadata*'s recipe path, leaving broken packages in place."""
    outcome = api.test(metadata.path, move_broken=False, config=config)
    return outcome
def main(directory, channel_urls=None, inspect_conda_bld_directory=True, config=None):
    """Build, then test, every package recipe found in *directory*.

    Recipes are built in topological order of their build/run dependencies,
    then tested in topological order of their run/test dependencies.

    Args:
        directory: folder containing the package recipes.
        channel_urls: extra channel URLs to resolve dependencies from.
        inspect_conda_bld_directory: when True, skip building packages whose
            output file already exists on disk.
        config: base conda-build Config, merged per package.

    Raises:
        ValueError: if the recipe dependency graph contains a cycle.
    """
    # Fix: avoid a mutable default argument ([] shared across calls).
    if channel_urls is None:
        channel_urls = []
    download(directory, test=False, config=config)
    # if os.path.exists(sys_rc_path):
    #     with open(sys_rc_path, 'r') as filehandler:
    #         old_rc_config = yaml_load(filehandler) or None
    # else:
    #     old_rc_config = None
    # with open(sys_rc_path, 'w') as filehandler:
    #     yaml_dump(dict(channels = ["file:///"]),filehandler)
    packages = list_packages(directory, channel_urls=channel_urls, config=config)

    # First graph: build order (edge dep -> package for build and run deps).
    graph = networkx.DiGraph()
    for index, package in enumerate(packages):
        graph.add_node(package.meta['package']['name'], identifier=index)
    for package in packages:
        for dependency in package.meta.get("requirements", {}).get("build", []):
            dependency = dependency.split()[0]  # strip version/build constraints
            if graph.has_node(dependency):
                graph.add_edge(dependency, package.meta['package']['name'])
        for dependency in package.meta.get("requirements", {}).get("run", []):
            dependency = dependency.split()[0]
            if graph.has_node(dependency):
                graph.add_edge(dependency, package.meta['package']['name'])
    if not networkx.is_directed_acyclic_graph(graph):
        raise ValueError("recipe dependency graph contains a cycle")

    outputs = [0] * len(packages)
    for package in networkx.topological_sort(graph):
        identifier = graph.node[package]["identifier"]
        package = packages[identifier]
        local_config = get_or_merge_config(config, channel_urls=channel_urls)
        local_config.compute_build_id(package.name())
        output_file_path = conda_build.get_output_file_path(package, config=local_config)
        if isinstance(output_file_path, list):
            output_file_path = output_file_path.pop()
        output_file_path = str(output_file_path)
        # Only build when the output is missing (or when inspection is disabled).
        if not inspect_conda_bld_directory or not os.path.exists(output_file_path):
            conda_build.build(package, config=local_config, notest=True)
        outputs[identifier] = output_file_path

    download(directory, config=config)

    # Second graph: test order (run- and test-time dependencies).
    graph = networkx.DiGraph()
    for index, package in enumerate(packages):
        graph.add_node(package.meta['package']['name'], identifier=index)
    for package in packages:
        for dependency in package.meta.get("requirements", {}).get("run", []):
            dependency = dependency.split()[0]
            if graph.has_node(dependency):
                graph.add_edge(dependency, package.meta['package']['name'])
        # NOTE(review): conda meta.yaml normally spells this `test: requires`;
        # `test: requirements` may never match anything — confirm against the
        # output of list_packages() before changing.
        for dependency in package.meta.get("test", {}).get("requirements", []):
            dependency = dependency.split()[0]
            if graph.has_node(dependency):
                graph.add_edge(dependency, package.meta['package']['name'])
    if not networkx.is_directed_acyclic_graph(graph):
        raise ValueError("recipe dependency graph contains a cycle")
    for package in networkx.topological_sort(graph):
        identifier = graph.node[package]["identifier"]
        local_config = get_or_merge_config(config, channel_urls=channel_urls)
        local_config.compute_build_id(packages[identifier].name())
        conda_build.test(outputs[identifier], config=local_config)
    # if old_rc_config:
    #     with open(sys_rc_path, 'w') as filehandler:
    #         yaml_dump(old_rc_config, filehandler)
    # else:
    #     os.remove(sys_rc_path)
def test_metadata_test(test_metadata):
    """A rendered MetaData object can be built and then tested directly."""
    # Build without the inline test phase, then run the test phase explicitly.
    api.build(test_metadata, notest=True)
    api.test(test_metadata)
def test_recipe_builds(recipe, test_config, testing_workdir):
    """Build each parametrized recipe, then test it if the build produced output."""
    built_ok = api.build(recipe, config=test_config)
    if built_ok:
        api.test(recipe, config=test_config)