import os

from ruamel.yaml import YAML

from conda_forge_tick.migrators import (
    Version,
    ExtraJinja2KeysCleanup,
)

from test_migrators import run_test_migration

VERSION_CF = Version(piggy_back_migrations=[ExtraJinja2KeysCleanup()])

YAML_PATH = os.path.join(os.path.dirname(__file__), 'test_yaml')


def test_version_extra_jinja2_keys_cleanup(tmpdir):
    with open(os.path.join(YAML_PATH, 'version_extra_jinja2_keys.yaml'), 'r') as fp:
        in_yaml = fp.read()

    with open(
        os.path.join(YAML_PATH, 'version_extra_jinja2_keys_correct.yaml'),
        'r',
    ) as fp:
        out_yaml = fp.read()

    os.makedirs(os.path.join(tmpdir, 'recipe'), exist_ok=True)

    run_test_migration(
        m=VERSION_CF,
        inp=in_yaml,
        output=out_yaml,
    MetaYamlTypedDict,
    PackageName,
)

logger = logging.getLogger("conda_forge_tick.auto_tick")

PR_LIMIT = 5
MAX_PR_LIMIT = 50

MIGRATORS: MutableSequence[Migrator] = [
    Version(
        pr_limit=PR_LIMIT * 2,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
        ],
    ),
]

BOT_RERUN_LABEL = {
    "name": "bot-rerun",
    "color": "#191970",
    "description": (
        "Apply this label if you want the bot to retry "
        "issuing a particular pull-request"
    ),
}
import os

import pytest

from conda_forge_tick.migrators import (
    Version,
    DuplicateLinesCleanup,
)

from test_migrators import run_test_migration

VERSION_DLC = Version(
    set(),
    piggy_back_migrations=[DuplicateLinesCleanup()],
)

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


@pytest.mark.parametrize(
    "slug,clean_slug",
    [
        ("noarch: generic", "noarch: generic"),
        ("noarch: python", "noarch: python"),
        ("noarch: generic ", "noarch: generic"),
        ("noarch: python ", "noarch: python"),
    ],
)
def test_version_duplicate_lines_cleanup(slug, clean_slug, tmpdir):
    with open(os.path.join(YAML_PATH, "version_duplicate_lines_cleanup.yaml")) as fp:
        in_yaml = fp.read()
import os
import logging

import pytest
from flaky import flaky

from conda_forge_tick.migrators import Version

from test_migrators import run_test_migration

VERSION = Version(set())

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


@pytest.mark.parametrize(
    "case,new_ver",
    [
        ("jinja2expr", "1.1.1"),
        ("weird", "1.6.0"),
        ("compress", "0.9"),
        ("onesrc", "2.4.1"),
        ("multisrc", "2.4.1"),
        ("jinja2sha", "2.4.1"),
        ("r", "1.3_2"),
        ("multisrclist", "2.25.0"),
        ("jinja2selsha", "4.7.2"),
        ("jinja2nameshasel", "4.7.2"),
        ("shaquotes", "0.6.0"),
        ("cdiff", "0.15.0"),
        ("selshaurl", "3.7.0"),
        ("buildbumpmpi", "7.8.0"),
    - python
    - setuptools
    - cython
    - numpy
  run:
    - python
    - {{ pin_compatible('numpy') }}

test:
  requires:
    - pytest
    - mpmath
"""

js = JS()
version = Version()
compiler = Compiler()
noarch = Noarch()
noarchr = NoarchR()
perl = Pinning(removals={"perl"})
pinning = Pinning()

rebuild = Rebuild(name='rebuild', cycles=[])
rebuild.filter = lambda x: False

blas_rebuild = BlasRebuild(cycles=[])
blas_rebuild.filter = lambda x: False

test_list = [
    (
        js,
about:
  home: https://github.com/bids/viscm
  license: MIT
  license_file: LICENSE
  license_family: MIT
  # license_file: '' we need to an issue upstream to get a license in the source dist.
  summary: A colormap tool

extra:
  recipe-maintainers:
    - kthyng
"""

js = JS()
version = Version(set())
# compiler = Compiler()
noarch = Noarch()
noarchr = NoarchR()
perl = Pinning(removals={"perl"})
pinning = Pinning()


class _Rebuild(NoFilter, Rebuild):
    pass


rebuild = _Rebuild(name="rebuild", cycles=[])


class _BlasRebuild(NoFilter, BlasRebuild):
import os

import pytest

from conda_forge_tick.migrators import (
    Version,
    Cos7Config,
)
from conda_forge_tick.migrators.cos7 import REQUIRED_RE_LINES, _has_line_set

from test_migrators import run_test_migration

VERSION_COS7 = Version(
    set(),
    piggy_back_migrations=[Cos7Config()],
)

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


@pytest.mark.parametrize("remove_quay", [False, True])
@pytest.mark.parametrize("case", list(range(len(REQUIRED_RE_LINES))))
def test_version_cos7_config(case, remove_quay, tmpdir):
    with open(os.path.join(YAML_PATH, "version_cos7_config_simple.yaml")) as fp:
        in_yaml = fp.read()

    with open(
        os.path.join(YAML_PATH, "version_cos7_config_simple_correct.yaml"),
    ) as fp:
        out_yaml = fp.read()
import os

import requests
import pytest

from test_migrators import run_test_migration, run_minimigrator
from conda_forge_tick.migrators import (
    Version,
    PipWheelMigrator,
)

wheel_mig = PipWheelMigrator()

version_migrator_whl = Version(
    set(),
    piggy_back_migrations=[wheel_mig],
)

# os is needed for the path join below
YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")

opentelemetry_instrumentation = """\
{% set name = "opentelemetry-instrumentation" %}
{% set version = "0.22b0" %}

package:
  name: {{ name|lower }}
  version: {{ version }}

source:
  url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/opentelemetry-instrumentation-{{ version }}.tar.gz
  sha256: dummy_hash
import os

from conda_forge_tick.migrators import UpdateConfigSubGuessMigrator, Version

from test_migrators import run_test_migration

migrator = UpdateConfigSubGuessMigrator()
version_migrator = Version(piggy_back_migrations=[migrator])

config_recipe = """\
{% set version = "7.0" %}

package:
  name: readline
  version: {{ version }}

source:
  url: ftp://ftp.gnu.org/gnu/readline/readline-{{ version }}.tar.gz
  sha256: 750d437185286f40a369e1e4f4764eda932b9459b5ec9a731628393dd3d32334

build:
  skip: true  # [win]
  number: 2
  run_exports:
    # change soname at major ver: https://abi-laboratory.pro/tracker/timeline/readline/
    - {{ pin_subpackage('readline') }}

requirements:
  build:
    - pkg-config
if typing.TYPE_CHECKING:
    from .cli import CLIArgs
    from .migrators_types import (
        MetaYamlTypedDict,
        PackageName,
        AttrsTypedDict,
    )

logger = logging.getLogger("conda_forge_tick.auto_tick")

MIGRATORS: MutableSequence[Migrator] = [
    Version(
        pr_limit=10,
        piggy_back_migrations=[
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
        ],
    ),
]

BOT_RERUN_LABEL = {
    "name": "bot-rerun",
    "color": "#191970",
    "description": (
        "Apply this label if you want the bot to retry "
        "issuing a particular pull-request"
    ),
}
from conda_forge_tick.migrators import Version, LicenseMigrator
from conda_forge_tick.migrators.license import _munge_licenses

from test_migrators import run_test_migration

LM = LicenseMigrator()
VER_LM = Version(piggy_back_migrations=[LM])

version_license = """\
{% set version = "0.8" %}

package:
  name: viscm
  version: {{ version }}

source:
  url: https://pypi.io/packages/source/v/viscm/viscm-{{ version }}.tar.gz
  sha256: dca77e463c56d42bbf915197c9b95e98913c85bef150d2e1dd18626b8c2c9c32

build:
  number: 0
  noarch: python
  script: python -m pip install --no-deps --ignore-installed .

requirements:
  host:
    - python
    - pip
    - numpy
  run:
    - python
import os
import logging

import pytest

from conda_forge_tick.migrators import Version

from test_migrators import run_test_migration

VERSION = Version(set(), dict(), dict())

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


@pytest.mark.parametrize(
    "case,new_ver",
    [
        ("jinja2expr", "1.1.1"),
        ("weird", "1.6.0"),
        ("compress", "0.9"),
        ("onesrc", "2.4.1"),
        ("multisrc", "2.4.1"),
        ("jinja2sha", "2.4.1"),
        ("r", "1.3_2"),
        # upstream is not available
        # ("cb3multi", "6.0.0"),
        ("multisrclist", "2.25.0"),
        ("jinja2selsha", "4.7.2"),
        ("jinja2nameshasel", "4.7.2"),
        ("shaquotes", "0.6.0"),
        ("cdiff", "0.15.0"),
        ("selshaurl", "3.7.0"),
about:
  home: https://github.com/bids/viscm
  license: MIT
  license_file: LICENSE
  license_family: MIT
  # license_file: '' we need to an issue upstream to get a license in the source dist.
  summary: A colormap tool

extra:
  recipe-maintainers:
    - kthyng
"""

js = JS()
version = Version(set(), dict(), dict())
# compiler = Compiler()
noarch = Noarch()
noarchr = NoarchR()
perl = Pinning(removals={"perl"})
pinning = Pinning()


class _Rebuild(NoFilter, Rebuild):
    pass


rebuild = _Rebuild(name="rebuild", cycles=[])


class _BlasRebuild(NoFilter, BlasRebuild):
def main(args: "CLIArgs") -> None:
    # start profiler
    profile_profiler = cProfile.Profile()
    profile_profiler.enable()

    # logging
    from .xonsh_utils import env

    debug = env.get("CONDA_FORGE_TICK_DEBUG", False)
    if debug:
        setup_logger(logging.getLogger("conda_forge_tick"), level="debug")
    else:
        setup_logger(logging.getLogger("conda_forge_tick"))

    github_username = env.get("USERNAME", "")
    github_password = env.get("PASSWORD", "")
    github_token = env.get("GITHUB_TOKEN")

    global MIGRATORS
    mctx, temp, MIGRATORS = initialize_migrators(
        github_username=github_username,
        github_password=github_password,
        dry_run=args.dry_run,
        github_token=github_token,
    )

    python_nodes = {
        n for n, v in mctx.graph.nodes("payload") if "python" in v.get("req", "")
    }
    python_nodes.update(
        [
            k
            for node_name, node in mctx.graph.nodes("payload")
            for k in node.get("outputs_names", [])
            if node_name in python_nodes
        ],
    )
    imports_by_package, packages_by_import = create_package_import_maps(python_nodes)

    version_migrator = Version(
        python_nodes=python_nodes,
        imports_by_package=imports_by_package,
        packages_by_import=packages_by_import,
        pr_limit=PR_LIMIT * 2,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
        ],
    )

    MIGRATORS = [version_migrator] + MIGRATORS

    # compute the time per migrator
    (num_nodes, time_per_migrator, tot_time_per_migrator) = _compute_time_per_migrator(
        mctx,
    )
    for i, migrator in enumerate(MIGRATORS):
        if hasattr(migrator, "name"):
            extra_name = "-%s" % migrator.name
        else:
            extra_name = ""

        logger.info(
            "Total migrations for %s%s: %d - gets %f seconds (%f percent)",
            migrator.__class__.__name__,
            extra_name,
            num_nodes[i],
            time_per_migrator[i],
            time_per_migrator[i] / tot_time_per_migrator * 100,
        )

    for mg_ind, migrator in enumerate(MIGRATORS):
        mmctx = MigratorContext(session=mctx, migrator=migrator)
        migrator.bind_to_ctx(mmctx)

        good_prs = 0
        _mg_start = time.time()
        effective_graph = mmctx.effective_graph
        time_per = time_per_migrator[mg_ind]

        if hasattr(migrator, "name"):
            extra_name = "-%s" % migrator.name
        else:
            extra_name = ""

        logger.info(
            "Running migrations for %s%s: %d",
            migrator.__class__.__name__,
            extra_name,
            len(effective_graph.nodes),
        )

        possible_nodes = list(migrator.order(effective_graph, mctx.graph))

        # version debugging info
        if isinstance(migrator, Version):
            logger.info("possible version migrations:")
            for node_name in possible_nodes:
                with effective_graph.nodes[node_name]["payload"] as attrs:
                    logger.info(
                        "    node|curr|new|attempts: %s|%s|%s|%d",
                        node_name,
                        attrs.get("version"),
                        attrs.get("new_version"),
                        (
                            attrs.get("new_version_attempts", {}).get(
                                attrs.get("new_version", ""),
                                0,
                            )
                        ),
                    )

        for node_name in possible_nodes:
            with mctx.graph.nodes[node_name]["payload"] as attrs:
                # Don't let CI timeout, break ahead of the timeout so we make certain
                # to write to the repo
                # TODO: convert these env vars
                _now = time.time()
                if (
                    (
                        _now - int(env.get("START_TIME", time.time()))
                        > int(env.get("TIMEOUT", 600))
                    )
                    or good_prs >= migrator.pr_limit
                    or (_now - _mg_start) > time_per
                ):
                    break

                fctx = FeedstockContext(
                    package_name=node_name,
                    feedstock_name=attrs["feedstock_name"],
                    attrs=attrs,
                )

                print("\n", flush=True, end="")
                logger.info(
                    "%s%s IS MIGRATING %s",
                    migrator.__class__.__name__.upper(),
                    extra_name,
                    fctx.package_name,
                )
                try:
                    # Don't bother running if we are at zero
                    if (
                        args.dry_run
                        or mctx.gh.rate_limit()["resources"]["core"]["remaining"] == 0
                    ):
                        break
                    migrator_uid, pr_json = run(
                        feedstock_ctx=fctx,
                        migrator=migrator,
                        rerender=migrator.rerender,
                        protocol="https",
                        hash_type=attrs.get("hash_type", "sha256"),
                    )
                    # if migration successful
                    if migrator_uid:
                        d = frozen_to_json_friendly(migrator_uid)
                        # if we have the PR already do nothing
                        if d["data"] in [
                            existing_pr["data"] for existing_pr in attrs.get("PRed", [])
                        ]:
                            pass
                        else:
                            if not pr_json:
                                pr_json = {
                                    "state": "closed",
                                    "head": {"ref": "<this_is_not_a_branch>"},
                                }
                            d["PR"] = pr_json
                            attrs.setdefault("PRed", []).append(d)
                        attrs.update(
                            {
                                "smithy_version": mctx.smithy_version,
                                "pinning_version": mctx.pinning_version,
                            },
                        )

                except github3.GitHubError as e:
                    if e.msg == "Repository was archived so is read-only.":
                        attrs["archived"] = True
                    else:
                        logger.critical(
                            "GITHUB ERROR ON FEEDSTOCK: %s",
                            fctx.feedstock_name,
                        )
                        if is_github_api_limit_reached(e, mctx.gh):
                            break
                except URLError as e:
                    logger.exception("URLError ERROR")
                    attrs["bad"] = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                        "code": getattr(e, "code"),
                        "url": getattr(e, "url"),
                    }
                except Exception as e:
                    logger.exception("NON GITHUB ERROR")
                    attrs["bad"] = {
                        "exception": str(e),
                        "traceback": str(traceback.format_exc()).split("\n"),
                    }
                else:
                    if migrator_uid:
                        # On successful PR add to our counter
                        good_prs += 1
                finally:
                    # Write graph partially through
                    if not args.dry_run:
                        dump_graph(mctx.graph)

                    eval_cmd(f"rm -rf {mctx.rever_dir}/*")
                    logger.info(os.getcwd())
                    for f in glob.glob("/tmp/*"):
                        if f not in temp:
                            eval_cmd(f"rm -rf {f}")

    if not args.dry_run:
        logger.info(
            "API Calls Remaining: %d",
            mctx.gh.rate_limit()["resources"]["core"]["remaining"],
        )
    logger.info("Done")

    # stop profiler
    profile_profiler.disable()

    # human readable
    s_stream = io.StringIO()

    # TODO: There are other ways to do this, with more freedom
    profile_stats = pstats.Stats(profile_profiler, stream=s_stream).sort_stats(
        "tottime",
    )
    profile_stats.print_stats()

    # get current time
    now = datetime.now()
    current_time = now.strftime("%d-%m-%Y") + "_" + now.strftime("%H_%M_%S")

    # output to data
    os.makedirs("profiler", exist_ok=True)
    with open(f"profiler/{current_time}.txt", "w+") as f:
        f.write(s_stream.getvalue())
about:
  home: https://github.com/bids/viscm
  license: MIT
  license_file: LICENSE
  license_family: MIT
  # license_file: '' we need to an issue upstream to get a license in the source dist.
  summary: A colormap tool

extra:
  recipe-maintainers:
    - kthyng
"""

js = JS()
version = Version()
lm = LicenseMigrator()
version_license_migrator = Version(piggy_back_migrations=[lm])
compiler = Compiler()
noarch = Noarch()
noarchr = NoarchR()
perl = Pinning(removals={"perl"})
pinning = Pinning()


class _Rebuild(NoFilter, Rebuild):
    pass


rebuild = _Rebuild(name="rebuild", cycles=[])
def initialize_migrators(
    github_username: str = "",
    github_password: str = "",
    github_token: Optional[str] = None,
    dry_run: bool = False,
) -> Tuple[MigratorSessionContext, list, MutableSequence[Migrator]]:
    temp = glob.glob("/tmp/*")
    gx = load_graph()
    smithy_version = eval_cmd("conda smithy --version").strip()
    pinning_version = json.loads(
        eval_cmd("conda list conda-forge-pinning --json"),
    )[0]["version"]

    migrators = []

    add_arch_migrate(migrators, gx)
    migration_factory(migrators, gx)
    add_replacement_migrator(
        migrators,
        gx,
        "matplotlib",
        "matplotlib-base",
        (
            "Unless you need `pyqt`, recipes should depend only on "
            "`matplotlib-base`."
        ),
        alt_migrator=MatplotlibBase,
    )
    create_migration_yaml_creator(migrators=migrators, gx=gx)

    print("rebuild migration graph sizes:", flush=True)
    for m in migrators:
        print(
            f'    {getattr(m, "name", m)} graph size: '
            f'{len(getattr(m, "graph", []))}',
            flush=True,
        )
    print(" ", flush=True)

    mctx = MigratorSessionContext(
        circle_build_url=os.getenv("CIRCLE_BUILD_URL", ""),
        graph=gx,
        smithy_version=smithy_version,
        pinning_version=pinning_version,
        github_username=github_username,
        github_password=github_password,
        github_token=github_token,
        dry_run=dry_run,
    )

    print("building package import maps and version migrator", flush=True)
    python_nodes = {
        n for n, v in mctx.graph.nodes("payload") if "python" in v.get("req", "")
    }
    python_nodes.update(
        [
            k
            for node_name, node in mctx.graph.nodes("payload")
            for k in node.get("outputs_names", [])
            if node_name in python_nodes
        ],
    )
    version_migrator = Version(
        python_nodes=python_nodes,
        pr_limit=PR_LIMIT * 4,
        piggy_back_migrations=[
            Jinja2VarsCleanup(),
            DuplicateLinesCleanup(),
            PipMigrator(),
            LicenseMigrator(),
            CondaForgeYAMLCleanup(),
            ExtraJinja2KeysCleanup(),
            Build2HostMigrator(),
            NoCondaInspectMigrator(),
            Cos7Config(),
        ],
    )

    migrators = [version_migrator] + migrators

    print(" ", flush=True)

    return mctx, temp, migrators
import os
import logging

import pytest

from conda_forge_tick.migrators import Version

from test_migrators import run_test_migration

VERSION = Version()

YAML_PATH = os.path.join(os.path.dirname(__file__), 'test_yaml')


@pytest.mark.parametrize('case,new_ver', [
    ('compress', '0.9'),
    ('onesrc', '2.4.1'),
    ('multisrc', '2.4.1'),
    ('jinja2sha', '2.4.1'),
    ('r', '1.3_2'),
    ('cb3multi', '6.0.0'),
    ('multisrclist', '2.25.0'),
    ('jinja2selsha', '4.7.2'),
    ('jinja2nameshasel', '4.7.2'),
    ('shaquotes', '0.6.0'),
    ('cdiff', '0.15.0'),
    ('selshaurl', '3.8.0'),
    ('buildbumpmpi', '7.8.0'),
    ('multisrclistnoup', '3.11.3'),
    ('pypiurl', '0.7.1'),
    ('githuburl', '1.1.0'),
    ('ccacheerr', '3.7.7'),
import os

from ruamel.yaml import YAML

from conda_forge_tick.migrators import (
    Version,
    ExtraJinja2KeysCleanup,
)

from test_migrators import run_test_migration

VERSION_CF = Version(
    set(),
    dict(),
    dict(),
    piggy_back_migrations=[ExtraJinja2KeysCleanup()],
)

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


def test_version_extra_jinja2_keys_cleanup(tmpdir):
    with open(os.path.join(YAML_PATH, "version_extra_jinja2_keys.yaml"), "r") as fp:
        in_yaml = fp.read()

    with open(
        os.path.join(YAML_PATH, "version_extra_jinja2_keys_correct.yaml"),
        "r",
    ) as fp:
        out_yaml = fp.read()
import os

import pytest
from ruamel.yaml import YAML

from conda_forge_tick.migrators import (
    Version,
    CondaForgeYAMLCleanup,
)

from test_migrators import run_test_migration

VERSION_CF = Version(piggy_back_migrations=[CondaForgeYAMLCleanup()])

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


@pytest.mark.parametrize(
    "cases",
    [
        tuple(),
        ("min_r_ver",),
        ("min_py_ver",),
        ("max_r_ver",),
        ("max_py_ver",),
        ("max_r_ver", "max_py_ver"),
        ("compiler_stack", "max_r_ver"),
        ("compiler_stack",),
    ],
)
def test_version_cfyaml_cleanup(cases, tmpdir):
    yaml = YAML()
    Version,
    PipMigrator,
    LicenseMigrator,
    MigrationYaml,
    GraphMigrator,
    Replacement,
    ArchRebuild,
)

if typing.TYPE_CHECKING:
    from .cli import CLIArgs
    from .migrators_types import *

MIGRATORS: MutableSequence[Migrator] = [
    Version(pr_limit=30, piggy_back_migrations=[PipMigrator(), LicenseMigrator()]),
]

BOT_RERUN_LABEL = {
    "name": "bot-rerun",
    "color": "#191970",
    "description": "Apply this label if you want the bot to retry issuing a particular pull-request",
}


def run(
    feedstock_ctx: FeedstockContext,
    migrator: Migrator,
    protocol: str = "ssh",
    pull_request: bool = True,
    rerender: bool = True,
    NoCondaInspectMigrator,
)

from test_migrators import run_test_migration

config_migrator = UpdateConfigSubGuessMigrator()
guard_testing_migrator = GuardTestingMigrator()
cmake_migrator = UpdateCMakeArgsMigrator()
cross_python_migrator = CrossPythonMigrator()
cross_rbase_migrator = CrossRBaseMigrator()
b2h_migrator = Build2HostMigrator()
nci_migrator = NoCondaInspectMigrator()

version_migrator_autoconf = Version(
    set(),
    piggy_back_migrations=[
        config_migrator,
        cmake_migrator,
        guard_testing_migrator,
    ],
)
version_migrator_cmake = Version(
    set(),
    piggy_back_migrations=[
        cmake_migrator,
        guard_testing_migrator,
        cross_rbase_migrator,
        cross_python_migrator,
    ],
)
version_migrator_python = Version(
    set(),
    piggy_back_migrations=[cross_python_migrator],
)
from conda_forge_tick.update_deps import (
    get_depfinder_comparison,
    get_grayskull_comparison,
    generate_dep_hint,
    make_grayskull_recipe,
    _update_sec_deps,
    _merge_dep_comparisons_sec,
)
from conda_forge_tick.migrators import Version, DependencyUpdateMigrator

import pytest

from test_migrators import run_test_migration

VERSION = Version(
    set(),
    piggy_back_migrations=[DependencyUpdateMigrator(set())],
)


@pytest.mark.parametrize(
    "dp1,dp2,m",
    [
        ({}, {}, {}),
        (
            {"df_minus_cf": {"a"}},
            {},
            {"df_minus_cf": {"a"}},
import os

import pytest

from conda_forge_tick.migrators import (
    Version,
    PipMigrator,
)

from test_migrators import run_test_migration

PC = PipMigrator()
VERSION_PC = Version(piggy_back_migrations=[PC])

YAML_PATH = os.path.join(os.path.dirname(__file__), 'test_yaml')


@pytest.mark.parametrize('case', ['simple', 'selector'])
def test_version_pipcheck(case, tmpdir):
    with open(os.path.join(YAML_PATH, 'version_usepip_%s.yaml' % case), 'r') as fp:
        in_yaml = fp.read()

    with open(
        os.path.join(YAML_PATH, 'version_usepip_%s_correct.yaml' % case),
        'r',
    ) as fp:
        out_yaml = fp.read()

    run_test_migration(
        m=VERSION_PC,
        inp=in_yaml,
import os

import pytest
from ruamel.yaml import YAML

from conda_forge_tick.migrators import (
    Version,
    MPIPinRunAsBuildCleanup,
)
from conda_forge_tick.migrators.mpi_pin_run_as_build import MPIS

from test_migrators import run_test_migration

VERSION_CF = Version(
    set(),
    piggy_back_migrations=[MPIPinRunAsBuildCleanup()],
)

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


@pytest.mark.parametrize(
    "vals",
    [
        {},
        {"mpich": "x.x"},
        {"openmpi": "x.x"},
        {"openmpi": "x.x", "mpich": "x.x"},
        {"blah": "x"},
        {"mpich": "x.x", "blah": "x"},
        {"openmpi": "x.x", "blah": "x"},
        {"openmpi": "x.x", "mpich": "x.x", "blah": "x"},
    ],
import os

import pytest

from conda_forge_tick.migrators import (
    Version,
    PipMigrator,
)

from test_migrators import run_test_migration

PC = PipMigrator()
VERSION_PC = Version(set(), dict(), dict(), piggy_back_migrations=[PC])

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


@pytest.mark.parametrize("case", ["simple", "selector"])
def test_version_pipcheck(case, tmpdir):
    with open(os.path.join(YAML_PATH, "version_usepip_%s.yaml" % case), "r") as fp:
        in_yaml = fp.read()

    with open(
        os.path.join(YAML_PATH, "version_usepip_%s_correct.yaml" % case),
        "r",
    ) as fp:
        out_yaml = fp.read()

    run_test_migration(
        m=VERSION_PC,
        inp=in_yaml,
        output=out_yaml,
        kwargs={"new_version": "0.9"},
    GuardTestingMigrator,
    UpdateCMakeArgsMigrator,
    CrossPythonMigrator,
)

from test_migrators import run_test_migration

config_migrator = UpdateConfigSubGuessMigrator()
guard_testing_migrator = GuardTestingMigrator()
cmake_migrator = UpdateCMakeArgsMigrator()
cross_python_migrator = CrossPythonMigrator()

version_migrator_autoconf = Version(
    set(),
    dict(),
    dict(),
    piggy_back_migrations=[
        config_migrator,
        cmake_migrator,
        guard_testing_migrator,
    ],
)
version_migrator_cmake = Version(
    set(),
    dict(),
    dict(),
    piggy_back_migrations=[
        cmake_migrator,
        guard_testing_migrator,
        cross_python_migrator,
    ],
)
version_migrator_python = Version(
    set(),
from flaky import flaky

from conda_forge_tick.migrators import Version, LicenseMigrator
from conda_forge_tick.migrators.license import _munge_licenses

from test_migrators import run_test_migration

LM = LicenseMigrator()
VER_LM = Version(set(), piggy_back_migrations=[LM])

version_license = """\
{% set version = "0.8" %}

package:
  name: viscm
  version: {{ version }}

source:
  url: https://pypi.io/packages/source/v/viscm/viscm-{{ version }}.tar.gz
  sha256: dca77e463c56d42bbf915197c9b95e98913c85bef150d2e1dd18626b8c2c9c32

build:
  number: 0
  noarch: python
  script: python -m pip install --no-deps --ignore-installed .

requirements:
  host:
    - python
    - pip
    - numpy
import os

import pytest
from ruamel.yaml import YAML

from conda_forge_tick.migrators import (
    Version,
    CondaForgeYAMLCleanup,
)

from test_migrators import run_test_migration

VERSION_CF = Version(
    set(),
    dict(),
    dict(),
    piggy_back_migrations=[CondaForgeYAMLCleanup()],
)

YAML_PATH = os.path.join(os.path.dirname(__file__), "test_yaml")


@pytest.mark.parametrize(
    "cases",
    [
        tuple(),
        ("min_r_ver",),
        ("min_py_ver",),
        ("max_r_ver",),
        ("max_py_ver",),
        ("max_r_ver", "max_py_ver"),
        ("compiler_stack", "max_r_ver"),