def test_context_init(self):
    """Check AnodContext construction against the local default env."""
    # Build a context using the local default configuration and verify
    # that its environment mirrors a freshly created BaseEnv.
    spec_repo = AnodSpecRepository(self.spec_dir)
    context = AnodContext(spec_repo)
    reference = BaseEnv()
    assert context.default_env.build == reference.build
    assert context.default_env.host == reference.host
    assert context.default_env.target == reference.target
    # Also exercise the factory forcing a x86-linux configuration.
    self.create_context()
def test_configure():
    """Check Configure() command lines for native, canadian and cross envs."""

    class AnodConf(Anod):
        # Minimal spec whose build primitive just returns the
        # configure command line.
        @Anod.primitive()
        def build(self):
            c = Configure(self)
            return c.cmdline()

    # Run inside a sandbox rooted at the current directory.
    Anod.sandbox = SandBox()
    Anod.sandbox.root_dir = os.getcwd()
    Anod.sandbox.create_dirs()

    ac = AnodConf(qualifier="", kind="build", jobs=10)
    AnodDriver(anod_instance=ac, store=None).activate(Anod.sandbox, None)
    ac.build_space.create()

    # Configure() can add $CONFIG_SHELL in the command line
    # Check that the two other arguments are as expected
    assert ac.build()["cmd"][-2:] == [
        "../src/configure",
        "--build=%s" % ac.env.build.triplet,
    ]

    # Check with canadian env (build, host and target all differ)
    canadian_env = BaseEnv()
    canadian_env.set_build("x86-windows")
    canadian_env.set_host("x86-linux")
    canadian_env.set_target("arm-elf")
    assert canadian_env.is_canadian
    ac2 = AnodConf(qualifier="", kind="build", jobs=10, env=canadian_env)
    AnodDriver(anod_instance=ac2, store=None).activate(Anod.sandbox, None)
    ac2.build_space.create()
    ac2_cmd = ac2.build()["cmd"]
    assert "--build=i686-pc-mingw32" in ac2_cmd
    assert "--host=i686-pc-linux-gnu" in ac2_cmd
    assert "--target=arm-eabi" in ac2_cmd

    # Check with cross env (only the target differs)
    cross_env = BaseEnv()
    cross_env.set_target("arm-elf")
    ac3 = AnodConf(qualifier="", kind="build", jobs=10, env=cross_env)
    AnodDriver(anod_instance=ac3, store=None).activate(Anod.sandbox, None)
    ac3.build_space.create()
    assert "--target=arm-eabi" in ac3.build()["cmd"]
def test_force_download_after_install(self):
    """Test two deps on the same spec with installation and download.

    Here we have two specs having an "installation" and a "download"
    dependency on the same spec (spec_build). When the two are set
    together the scheduler find the proper solution: download.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action("spec_install_dep", env=ac.default_env, primitive="build")
    # With only the install dependency the resolver cannot decide
    # between building and downloading spec_build.
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    # Check the exception itself (err.value), not the ExceptionInfo repr.
    assert "This plan resolver cannot decide" in str(err.value)
    # Adding the download dependency resolves the conflict: download wins.
    ac.add_anod_action("spec_download_dep", env=ac.default_env, primitive="build")
    result = ac.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_install_dep.build",
        "mylinux.x86_64-linux.spec_download_dep.build",
        "mylinux.x86_64-linux.spec_build.install",
        "mylinux.x86_64-linux.spec_build.download_bin",
    }
def __init__(
    self,
    spec_repository: AnodSpecRepository,
    default_env: Optional[BaseEnv] = None,
    reject_duplicates: bool = False,
):
    """Initialize a new context.

    :param spec_repository: an Anod repository
    :param default_env: environment used as the default for this
        context. Mainly useful to simulate another server context.
        When None the context is assumed to be the local server.
    :param reject_duplicates: if True, raise SchedulingError whenever
        two duplicated actions are generated
    """
    self.repo = spec_repository
    self.default_env = BaseEnv() if default_env is None else default_env.copy()
    self.reject_duplicates = reject_duplicates

    # Action DAG, rooted at a single Root node.
    self.tree = DAG()
    self.root = Root()
    self.add(self.root)

    # Bookkeeping: per-spec dependencies, loaded spec instances and
    # declared source builders.
    self.dependencies: Dict[str, Dict[str, Tuple[Dependency, Anod]]] = {}
    self.cache: Dict[CacheKeyType, Anod] = {}
    self.sources: Dict[str, Tuple[str, SourceBuilder]] = {}
def test_force_download_after_build(self):
    """Test two deps on the same spec with build and download.

    Here we have two specs having a "build_tree" and a "download"
    dependency on the same spec (spec_build). When the two are set
    together the scheduler cannot find a solution.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action("spec_build_dep", env=ac.default_env, primitive="build")
    # Verify that, when scheduling this plan, the scheduler ask for
    # having an explicit build
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    # Check the exception itself (err.value), not the ExceptionInfo repr.
    assert "A spec in the plan has a build_tree dependency on spec_build" in str(
        err.value
    )
    # Verify that after adding a download dep, the scheduler now
    # warns that he cannot resolve the plan
    ac.add_anod_action("spec_download_dep", env=ac.default_env, primitive="build")
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    assert "explicit DownloadBinary decision made" in str(err.value)
def test_force_download_before_install(self):
    """Test two deps on the same spec with installation and download.

    Same as test_force_download_after_install but in a different order.
    The end result should be the same.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    # The download dependency alone schedules fine: download is chosen.
    ac.add_anod_action("spec_download_dep", env=ac.default_env, primitive="build")
    result = ac.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_download_dep.build",
        "mylinux.x86_64-linux.spec_build.install",
        "mylinux.x86_64-linux.spec_build.download_bin",
    }
    ac.add_anod_action("spec_install_dep", env=ac.default_env, primitive="build")
    # NOTE(review): schedule() is invoked twice here; presumably this
    # verifies that scheduling is repeatable after adding an action —
    # confirm, otherwise the first call is redundant.
    ac.schedule(ac.always_create_source_resolver)
    result = ac.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_install_dep.build",
        "mylinux.x86_64-linux.spec_download_dep.build",
        "mylinux.x86_64-linux.spec_build.install",
        "mylinux.x86_64-linux.spec_build.download_bin",
    }
def __init__(self, spec_repository, default_env=None, reject_duplicates=False):
    """Initialize a new context.

    :param spec_repository: an Anod repository
    :type spec_repository: e3.anod.loader.AnodSpecRepository
    :param default_env: environment used as the default for this
        context. Mainly useful to simulate another server context.
        When None the context is assumed to be the local server.
    :type default_env: BaseEnv | None
    :param reject_duplicates: if True, raise SchedulingError whenever
        two duplicated actions are generated
    :type reject_duplicates: bool
    """
    self.repo = spec_repository
    self.default_env = BaseEnv() if default_env is None else default_env.copy()
    self.reject_duplicates = reject_duplicates

    # Action DAG, rooted at a single Root node.
    self.tree = DAG()
    self.root = Root()
    self.add(self.root)

    # Per-spec bookkeeping: dependencies, loaded specs, source builders.
    self.dependencies = {}
    self.cache = {}
    self.sources = {}
def env(self, parent, default_env): """Retrieve env for the dependency. :param parent: Anod instance in which the dep was declared :type parent: Anod :param default_env: default env for the current context :type default_env: BaseEnv :return: env object that should be used by the dependency :rtype: BaseEnv """ # Get the current environment associated with the Anod instance # and adjust it based on dependency parameters dep_env = BaseEnv(parent.env.build, parent.env.host, parent.env.target) # For simulation purposes we sometimes load specs as if it was # loaded on a non local machine thus 'default' does not correspond # to the default build platform of the local machine. if self.build == "default": build = default_env.build.platform else: build = self.build if self.host == "default": host = default_env.build.platform else: host = self.host if self.target == "default": target = default_env.build.platform else: target = self.target dep_env.set_env(build, host, target) return dep_env
def test_dag_2_plan_sources(self): """Check that we can extract values from plan in final dag. Use a scheduler to always create source and ask for a source package creation. """ # Create a new plan context ac = self.create_context() current_env = BaseEnv() cm = plan.PlanContext(server=current_env) # Declare available actions and their signature def anod_action( module, build=None, default_build=False, host=None, target=None, board=None, weathers=None, product_version=None, when_missing=None, manual_action=False, qualifier=None, jobs=None, releases=None, process_suffix=None, update_vcs=False, recursive=None, query_range=None, force_repackage=False, ): pass cm.register_action("anod_source", anod_action) # Create a simple plan content = ["def myserver():", ' anod_source("spec1", weathers="foo")'] with open("plan.txt", "w") as f: f.write("\n".join(content)) myplan = plan.Plan({}) myplan.load("plan.txt") # Execute the plan and create anod actions for action in cm.execute(myplan, "myserver"): ac.add_plan_action(action) for uid, _ in ac.tree: if uid.endswith("sources"): assert ac.tree.get_tag(uid) elif uid.endswith(".source.spec1-src"): assert ac.tree.get_tag(uid) assert ( ac.tree.get_context( vertex_id=uid, reverse_order=True, max_distance=1 )[0][2]["plan_args"]["weathers"] == "foo" )
def __init__(self, stack=None, plan=None, server=None, build=None,
             host=None, target=None, **kwargs):
    """Initialize an execution context or a scope.

    :param stack: stack of BaseEnv object that keep track of scopes. Used
        only internally
    :type stack: list[BaseEnv]
    :param plan: the plan being executed (propagated to sub-scopes)
    :param server: a BaseEnv object that represent the host default env.
        server parameter is taken into account only during creation of
        the initial context.
    :type server: BaseEnv
    :param build: see e3.env.BaseEnv.set_env
    :type build: str | None
    :param host: see e3.env.BaseEnv.set_env
    :type host: str | None
    :param target: see e3.env.BaseEnv.set_env
    :type target: str | None
    :param kwargs: additional data for the current scope/context
    :type kwargs: dict
    """
    if stack:
        # This is a scope creation, so copy current env
        self.stack = stack
        self.plan = plan
        new = self.stack[-1].copy(build=build, host=host, target=target)
    else:
        self.stack = []
        if server is not None:
            # This is a new context
            new = server.copy(build, host, target)
        else:
            # This is a new context with no server information. In that
            # case retrieve defaults for the local machine.
            new = BaseEnv()
            new.set_env(build, host, target)

    # Store additional data. Use items() rather than the Python 2-only
    # iteritems() so this also runs on Python 3.
    for k, v in kwargs.items():
        setattr(new, k, v)

    # And push on the stack
    self.stack.append(new)

    # Registered functions that correspond to actions. Note that
    # there is no need to propagate registered action to children
    # scopes. Only initial context use them. The overall scheme
    # works also because all scopes refer to the same stack of env
    # (because the object is mutable).
    self.actions = {}
    self.action_list = []
def create_context(self):
    """Return an AnodContext configured for a x86-linux machine."""
    repository = AnodSpecRepository(self.spec_dir)
    # Declare the git repositories referenced by the specs.
    for repo_name in ('spec1-git', 'spec8-git', 'spec2-git'):
        repository.repos[repo_name] = repo_name
    machine_env = BaseEnv()
    machine_env.set_build('x86-linux', 'rhes6', 'mylinux')
    return AnodContext(repository, default_env=machine_env)
def test_duplicated_lines(self, reject_duplicates):
    """Check that duplicated lines in plan are properly rejected."""
    ac = self.create_context(reject_duplicates=reject_duplicates)
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature
    def anod_action(
        module,
        build=None,
        default_build=False,
        host=None,
        target=None,
        board=None,
        weathers=None,
        product_version=None,
        when_missing=None,
        manual_action=False,
        qualifier=None,
        jobs=None,
        releases=None,
        process_suffix=None,
        update_vcs=False,
        recursive=None,
        query_range=None,
        force_repackage=False,
    ):
        pass

    cm.register_action("anod_build", anod_action)

    # Create a simple plan containing the same action twice (only the
    # "weathers" argument differs)
    content = [
        "def myserver():",
        '    anod_build("spec3", weathers="A")',
        '    anod_build("spec3", weathers="B")',
    ]
    with open("plan.plan", "w") as f:
        f.write("\n".join(content))
    myplan = plan.Plan({})
    myplan.load("plan.plan")

    if not reject_duplicates:
        # Execute the plan and create anod actions
        for action in cm.execute(myplan, "myserver"):
            ac.add_plan_action(action)
        # When duplicates are accepted the last line wins: the tag
        # carries weathers="B".
        for uid, _ in ac.tree:
            if uid.endswith("build"):
                assert ac.tree.get_tag(uid)["plan_args"]["weathers"] == "B"
    else:
        # In reject mode adding the duplicated action must fail.
        with pytest.raises(SchedulingError):
            for action in cm.execute(myplan, "myserver"):
                ac.add_plan_action(action)
def test_plan_call_args(self):
    """Retrieve call args values."""
    cm = plan.PlanContext(server=BaseEnv())

    # Declare available actions and their signature
    def plan_action(platform):
        pass

    cm.register_action("plan_action", plan_action)

    # Write a one-line plan calling the action.
    plan_body = "\n".join(["def myserver():", '    plan_action("any")'])
    with open("plan.txt", "w") as plan_file:
        plan_file.write(plan_body)
    myplan = plan.Plan({})
    myplan.load("plan.txt")

    # Each action exposes both the literal call arguments and the
    # fully resolved plan arguments.
    for action in cm.execute(myplan, "myserver"):
        assert action.plan_call_args == {"platform": "any"}
        assert action.plan_args["platform"] == BaseEnv().platform
def test_plan_call_args(self):
    """Retrieve call args values."""
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature
    def plan_action(platform):
        pass

    cm.register_action('plan_action', plan_action)

    # Create a simple plan
    content = [u'def myserver():', u'    plan_action("any")']
    with open('plan.txt', 'w') as f:
        f.write('\n'.join(content))
    myplan = plan.Plan({})
    myplan.load('plan.txt')

    # Each action exposes both the literal call arguments and the
    # fully resolved plan arguments.
    for action in cm.execute(myplan, 'myserver'):
        assert action.plan_call_args == {'platform': 'any'}
        assert action.plan_args['platform'] == BaseEnv().platform
def create_context(self, reject_duplicates: bool = True) -> AnodContext:
    """Create a spec repository and anod context.

    :param reject_duplicates: whether to reject duplicates in plan
    """

    def repo_conf(name: str) -> dict[str, str]:
        # Minimal git repository description for a spec checkout.
        return {"vcs": "git", "url": name, "branch": "master"}

    # Context simulating a x86-linux machine.
    repository = AnodSpecRepository(self.spec_dir)
    for repo_key, repo_url in (
        ("spec1-git", "spec1"),
        ("spec8-git", "spec8"),
        ("spec2-git", "spec2"),
        ("a-git", "a"),
    ):
        repository.repos[repo_key] = repo_conf(repo_url)
    machine_env = BaseEnv()
    machine_env.set_build("x86-linux", "rhes6", "mylinux")
    return AnodContext(
        repository, default_env=machine_env, reject_duplicates=reject_duplicates
    )
def test_force_download_before_build(self):
    """Test two deps on the same spec with build and download.

    Same as test_force_download_after_build but in a different order.
    The expected result is the same: an error should be raised.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action("spec_download_dep", env=ac.default_env, primitive="build")
    ac.add_anod_action("spec_build_dep", env=ac.default_env, primitive="build")
    # The build_tree dependency conflicts with the explicit download
    # decision, so scheduling must fail.
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    # Check the exception itself (err.value), not the ExceptionInfo repr.
    assert "explicit DownloadBinary decision made" in str(err.value)
def create_context(self, reject_duplicates=True): """Create a spec repository and anod context. :param reject_duplicates: whether to reject duplicates in plan :type reject_duplicates: bool :rtype: AnodContext """ # Create a context for a x86-linux machine asr = AnodSpecRepository(self.spec_dir) asr.repos['spec1-git'] = 'spec1-git' asr.repos['spec8-git'] = 'spec8-git' asr.repos['spec2-git'] = 'spec2-git' env = BaseEnv() env.set_build('x86-linux', 'rhes6', 'mylinux') ac = AnodContext(asr, default_env=env, reject_duplicates=reject_duplicates) return ac
def test_force_download_without_require_condition(self):
    """Test that the force download can be done thanks to require=xxx.

    A require condition can be added to the build primitive to disable
    the build primitive for some qualifiers.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    # We start with a dependency on spec_nobuild where build primitive
    # require condition is True
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action(
        "spec_nobuild_dep",
        env=ac.default_env,
        primitive="build",
    )
    # If both build and install are allowed the resolver will
    # complain and ask for an explicit choice
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    # Check the exception itself (err.value), not the ExceptionInfo repr.
    assert "what to do for resolving" in str(err.value)
    # Now with a dependency making require return False, and so
    # disable the build primitive, the resolver will not have any
    # conflict: the only allowed action will be download.
    ac2 = AnodContext(asr, default_env=env)
    ac2.add_anod_action(
        "spec_nobuild_stable_dep",
        env=ac.default_env,
        primitive="build",
    )
    result = ac2.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_nobuild.download_bin",
        "mylinux.x86_64-linux.spec_nobuild_stable_dep.build",
        "mylinux.x86_64-linux.spec_nobuild.install",
    }
def test_force_download_without_download_primitive(self):
    """Test that the force download do not require the download primitive.

    Having a download() primitive or not should not impact this feature.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action(
        "spec_download_dep_for_nodownloadprimitive",
        env=ac.default_env,
        primitive="build",
    )
    # Scheduling must still pick the download_bin action even though
    # the spec has no download() primitive.
    result = ac.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_download_dep_for_nodownloadprimitive.build",
        "mylinux.x86_64-linux.spec_nodownloadprimitive.install",
        "mylinux.x86_64-linux.spec_nodownloadprimitive.download_bin",
    }
def __init__(self, spec_repository, default_env=None):
    """Initialize a new context.

    :param spec_repository: an Anod repository
    :type spec_repository: e3.anod.AnodSpecRepository
    :param default_env: environment used as the default for this
        context. Mainly useful to simulate another server context.
        When None the context is assumed to be the local server.
    :type default_env: BaseEnv | None
    """
    self.repo = spec_repository
    self.default_env = BaseEnv() if default_env is None else default_env.copy()

    # Action DAG, rooted at a single Root node.
    self.tree = DAG()
    self.root = Root()
    self.add(self.root)

    # Loaded spec instances and declared source builders.
    self.cache = {}
    self.sources = {}
def run(self, **kwargs):
    """Run GNAThub with the appropriate command line.

    :param kwargs: Keyword arguments that translate into command line
        switches
    :type kwargs: dict[str,*] | None
    :raise GNAThubExecutionFailed: if GNAThub exits with a non-zero
        status, or if a plugin reported a failure in the backlog
    :raise TypeError: if the "script" argument is neither a str nor a
        support.Script
    """
    argv = ['gnathub', '-P', self.project.name]
    # Boolean switches (note that quiet defaults to True).
    if kwargs.get('quiet', True):
        argv.append('--quiet')
    if kwargs.get('verbose', False):
        argv.append('--verbose')
    if kwargs.get('incremental', False):
        argv.append('--incremental')
    if kwargs.get('gnatcheck_hide_exempted', False):
        argv.append('--gnatcheck-hide-exempted')
    if kwargs.get('dry_run', False):
        argv.append('--dry-run')
    if kwargs.get('runners_only', False):
        argv.append('--runners-only')
    if kwargs.get('reporters_only', False):
        argv.append('--reporters-only')
    # Scenario variables are passed as -Xname=value switches.
    if kwargs.get('scenario_vars', None):
        scenario = kwargs['scenario_vars']
        assert isinstance(scenario, dict), 'invalid "scenario_vars" arg'
        argv.extend(['-X%s=%s' % (k, v) for k, v in scenario.items()])
    if kwargs.get('plugins', None):
        assert isinstance(kwargs['plugins'], list), 'invalid "plugins" arg'
        argv.extend(['--plugins', ','.join(kwargs['plugins'])])
    if kwargs.get('subdirs', None):
        argv.extend(['--subdirs', kwargs['subdirs']])
    if kwargs.get('target', None):
        argv.extend(['--target', kwargs['target']])
    if kwargs.get('runtime', None):
        argv.extend(['--RTS', kwargs['runtime']])
    # Per-tool extra arguments: --targs:<tool> <args...> --
    for tool_name, arguments in kwargs.get('tool_args', {}).items():
        argv.append('--targs:%s' % tool_name)
        argv.extend(arguments)
        argv.append('--')
    # Optional script executed by GNAThub through --exec.
    if kwargs.get('script', None):
        argv.append('--exec')
        if isinstance(kwargs['script'], str):
            argv.append(os.path.abspath(kwargs['script']))
        elif isinstance(kwargs['script'], Script):
            argv.append(kwargs['script'].path)
        else:
            raise TypeError('expected str or support.Script')
    # Add the local bin directory to the path to use mocks if any
    BaseEnv().add_path(self._bin)
    run_kwargs = {
        'output': kwargs.get('output', PIPE),
        'error': kwargs.get('error', STDOUT),
        'timeout': kwargs.get('timeout', None),
        'env': kwargs.get('env', None)
    }
    p = Run(argv, cwd=self.project.install_dir, **run_kwargs)
    if p.status != 0:
        raise GNAThubExecutionFailed(p.out)
    if kwargs.get('dry_run', False):
        # In dry-run mode, the gnathub.backlog file is not created
        return
    backlog = os.path.join(
        self.project.install_dir, 'obj', 'gnathub', 'gnathub.backlog')
    if not os.path.exists(backlog):
        # when the object directory is the same as the project dir
        backlog = os.path.join(
            self.project.install_dir, 'gnathub', 'gnathub.backlog')
    with open(backlog, 'r') as fd:
        plugins = json.loads(fd.read())
    # NOTE(review): the backlog is presumably a JSON list of
    # (plugin-name, results) pairs — confirm the file format.
    failed = [name for name, results in plugins if not results['success']]
    if failed:
        raise GNAThubExecutionFailed('plugin(s) failure: {}: {}'.format(
            failed, p.out))
def __init__(
    self,
    stack: Optional[List[BaseEnv]] = None,
    plan: Optional[Plan] = None,
    ignore_disabled: bool = True,
    server: Optional[BaseEnv] = None,
    build: Optional[str] = None,
    host: Optional[str] = None,
    target: Optional[str] = None,
    enabled: bool = True,
    **kwargs: Any,
):
    """Initialize an execution context or a scope.

    :param stack: stack of BaseEnv object that keep track of scopes. Used
        only internally. User instantiation of PlanContext should be done
        with stack set to None.
    :param plan: the plan to execute
    :param ignore_disabled: when true, discard all lines in blocks
        "with defaults(enabled=False):"
    :param server: a BaseEnv object that represent the host default env.
        server parameter is taken into account only during creation of
        the initial context.
    :param build: see e3.env.BaseEnv.set_env
    :param host: see e3.env.BaseEnv.set_env
    :param target: see e3.env.BaseEnv.set_env
    :param enabled: whether the plan line is enabled or disabled
    :param kwargs: additional data for the current scope/context
    """
    self.ignore_disabled = ignore_disabled
    if stack:
        # This is a scope creation, so copy current env
        self.stack = stack
        self.plan = plan
        new = self.stack[-1].copy(build=build, host=host, target=target)
        if not enabled:
            # we are in a block with enabled=False set, disable all
            # lines in that block
            new.enabled = enabled
    else:
        self.stack = []
        if server is not None:
            # This is a new context
            new = server.copy(build, host, target)
        else:
            # This is a new context with no server information. In that
            # case retrieve defaults for the local machine.
            new = BaseEnv()
            new.set_env(build, host, target)
        new.enabled = enabled

    # Store additional data
    for k, v in kwargs.items():
        setattr(new, k, v)

    # And push on the stack
    self.stack.append(new)

    # Registered functions that correspond to actions. Note that
    # there is no need to propagate registered action to children
    # scopes. Only initial context use them. The overall scheme
    # works also because all scopes refer to the same stack of env
    # (because the object is mutable).
    self.actions: Dict[str, Callable] = {}
    self.action_list: List[BaseEnv] = []
def run(self, args):
    """Run the sandbox exec subcommand.

    Sets up (and optionally creates) the sandbox, loads the spec
    repository, then optionally loads and executes a plan file.

    :param args: parsed command-line arguments (sandbox root, specs
        location/origin, plan file, resolver name, dry-run flag)
    :raise SandBoxError: if the given plan file does not exist
    """
    sandbox = SandBox()
    sandbox.root_dir = args.sandbox
    if args.specs_dir:
        sandbox.specs_dir = args.specs_dir
    if args.create_sandbox:
        sandbox.create_dirs()
    # Optionally fetch the specs from a git repository.
    if args.create_sandbox and args.spec_git_url:
        mkdir(sandbox.specs_dir)
        g = GitRepository(sandbox.specs_dir)
        if e3.log.default_output_stream is not None:
            g.log_stream = e3.log.default_output_stream
        g.init()
        g.update(args.spec_git_url, args.spec_git_branch, force=True)
    sandbox.dump_configuration()
    sandbox.write_scripts()
    asr = AnodSpecRepository(sandbox.specs_dir)
    check_api_version(asr.api_version)

    # Load plan content if needed
    if args.plan:
        if not os.path.isfile(args.plan):
            raise SandBoxError(
                "plan file %s does not exist" % args.plan,
                origin="SandBoxExec.run")
        with open(args.plan, "r") as plan_fd:
            # Wrap the plan body in a function so PlanContext can
            # execute it as an entry point.
            plan_content = ["def main_entry_point():"]
            plan_content += [
                "    %s" % line for line in plan_fd.read().splitlines()
            ]
            plan_content = "\n".join(plan_content)
        env = BaseEnv()
        cm = PlanContext(server=env)
        store = None
        # Resolve the scheduling policy by name, falling back to
        # always_create_source_resolver.
        resolver = getattr(
            AnodContext,
            str(args.resolver),
            AnodContext.always_create_source_resolver,
        )
        logger.debug("Using resolver %s", resolver.__name__)

        # Declare available actions and their signature
        def anod_action(module, build=None, host=None, target=None,
                        qualifier=None):
            pass  # all: no cover

        for a in ("anod_install", "anod_build", "anod_test"):
            cm.register_action(a, anod_action)

        # Load the plan and execute
        plan = Plan(data={})
        plan.load_chunk(plan_content)
        actions = cm.execute(plan, "main_entry_point")
        ac = AnodContext(asr, default_env=env)
        for action in actions:
            # "anod_build" -> "build", etc.
            ac.add_anod_action(
                action.module,
                action,
                action.action.replace("anod_", "", 1),
                action.qualifier,
            )

        # Check if machine plan is locally schedulable
        action_list = ac.schedule(resolver)
        e = ElectrolytJobFactory(sandbox, asr, store, dry_run=args.dry_run)
        e.run(action_list)
def add_spec(self, name, env=None, primitive=None, qualifier=None,
             expand_build=True, source_name=None):
    """Expand an anod action into a tree (internal).

    :param name: spec name
    :type name: str
    :param env: spec environment
    :type env: BaseEnv | None
    :param primitive: spec primitive
    :type primitive: str
    :param qualifier: qualifier
    :type qualifier: str | None
    :param expand_build: should build primitive be expanded
    :type expand_build: bool
    :param source_name: source name associated with the source primitive
    :type source_name: str | None
    """
    # Initialize a spec instance
    spec = self.load(name, qualifier=qualifier, env=env, kind=primitive)

    # Initialize the resulting action based on the primitive name
    if primitive == 'source':
        result = CreateSource(spec, source_name)
    elif primitive == 'build':
        result = Build(spec)
    elif primitive == 'test':
        result = Test(spec)
    elif primitive == 'install':
        result = Install(spec)
    else:
        # Unsupported primitive: programming error
        raise Exception(primitive)

    if not spec.has_package and primitive == 'install' and \
            has_primitive(spec, 'build'):
        # Case in which we have an install dependency but no install
        # primitive. In that case the real dependency is a build tree
        # dependency. In case there is no build primitive and no
        # package keep the install primitive (usually this means there
        # is an overloaded download procedure).
        return self.add_spec(name, env, 'build', qualifier,
                             expand_build=False)

    if expand_build and primitive == 'build' and \
            spec.has_package:
        # A build primitive is required and the spec defined a binary
        # package. In that case the implicit post action of the build
        # will be a call to the install primitive
        return self.add_spec(name, env, 'install', qualifier)

    # Add this stage if the action is already in the DAG, then it has
    # already been added.
    if result in self:
        return result

    # Add the action in the DAG
    self.add(result)

    if primitive == 'install':
        # Expand an install node to:
        #   install --> decision --> build
        #                       \--> download binary
        download_action = DownloadBinary(spec)
        self.add(download_action)

        if has_primitive(spec, 'build'):
            build_action = self.add_spec(name, env, 'build', qualifier,
                                         expand_build=False)
            self.add_decision(BuildOrInstall, result, build_action,
                              download_action)
        else:
            # No build primitive: download is the only choice.
            self.connect(result, download_action)

    # Look for dependencies
    if '%s_deps' % primitive in dir(spec) and \
            getattr(spec, '%s_deps' % primitive) is not None:
        for e in getattr(spec, '%s_deps' % primitive):
            if isinstance(e, Dependency):
                if e.kind == 'source':
                    # A source dependency does not create a new node but
                    # ensure that sources associated with it are
                    # available
                    self.load(e.name, kind='source', env=BaseEnv(),
                              qualifier=None)
                    continue

                child_action = self.add_spec(
                    e.name, e.env(spec, self.default_env),
                    e.kind, e.qualifier)
                spec.deps[e.local_name] = result.anod_instance

                if e.kind == 'build' and \
                        self[child_action.uid].data.kind == 'install':
                    # We have a build tree dependency that produced a
                    # subtree starting with an install node. In that
                    # case we expect the user to choose BUILD as
                    # decision.
                    dec = self.predecessors(child_action)[0]
                    if isinstance(dec, BuildOrInstall):
                        dec.add_trigger(result, BuildOrInstall.BUILD)

                # Connect child dependency
                self.connect(result, child_action)

    # Look for source dependencies (i.e sources needed)
    if '%s_source_list' % primitive in dir(spec):
        for s in getattr(spec, '%s_source_list' % primitive):
            # set source builder
            if s.name in self.sources:
                s.set_builder(self.sources[s.name])
            # add source install node
            src_install_uid = result.uid.rsplit('.', 1)[0] + \
                '.source_install.' + s.name
            src_install_action = InstallSource(src_install_uid, spec, s)
            self.add(src_install_action)
            self.connect(result, src_install_action)

            # Then add nodes to create that source (download or creation
            # using anod source and checkouts)
            if s.name in self.sources:
                spec_decl, obj = self.sources[s.name]
            else:
                raise AnodError(
                    origin='expand_spec',
                    message='source %s does not exist '
                            '(referenced by %s)' % (s.name, result.uid))
            src_get_action = GetSource(obj)
            if src_get_action in self:
                # Source already expanded elsewhere: just connect it.
                self.connect(src_install_action, src_get_action)
                continue
            self.add(src_get_action)
            self.connect(src_install_action, src_get_action)
            src_download_action = DownloadSource(obj)
            self.add(src_download_action)

            if isinstance(obj, UnmanagedSourceBuilder):
                # In that case only download is available
                self.connect(src_get_action, src_download_action)
            else:
                # Managed source: add a source creation subtree (anod
                # source primitive + checkouts) and a decision node.
                source_action = self.add_spec(spec_decl, BaseEnv(),
                                              'source', None,
                                              source_name=s.name)
                for repo in obj.checkout:
                    r = Checkout(repo, self.repo.repos[repo])
                    self.add(r)
                    self.connect(source_action, r)
                self.add_decision(CreateSourceOrDownload,
                                  src_get_action,
                                  source_action,
                                  src_download_action)
    return result
def test_dag_2_plan(self): """Check that we can extract values from plan in final dag. Some paramaters passed in the plan are lost in the final scheduled dag, when plan lines are transformed into anod actions. It is possible to retrieve them by looking at the tags. """ # Create a new plan context ac = self.create_context() current_env = BaseEnv() cm = plan.PlanContext(server=current_env) # Declare available actions and their signature def anod_action( module, build=None, default_build=False, host=None, target=None, board=None, weathers=None, product_version=None, when_missing=None, manual_action=False, qualifier=None, jobs=None, releases=None, process_suffix=None, update_vcs=False, recursive=None, query_range=None, force_repackage=False, ): pass for a in ("anod_build", "anod_install", "anod_source", "anod_test"): cm.register_action(a, anod_action) # Create a simple plan content = [ "def myserver():", ' anod_build("spec12", weathers="foo")', ' anod_build("spec10", weathers="foo")', ' anod_build("spec11", weathers="bar")', ] with open("plan.txt", "w") as f: f.write("\n".join(content)) myplan = plan.Plan({}, plan_ext=".txt") myplan.load("plan.txt") # Execute the plan and create anod actions for action in cm.execute(myplan, "myserver"): primitive = action.action.replace("anod_", "", 1) ac.add_anod_action( name=action.module, env=current_env if action.default_build else action, primitive=primitive, qualifier=action.qualifier, plan_line=action.plan_line, plan_args=action.plan_args, ) # Create a reverse tag to have a working get_context # when looking for parameters such as weathers we want to # get the plan line that has triggered the action, e.g. 
# for spec3.build that has been triggered by spec10.build # we want to propagate the weathers set in the line # anod_build("spec10", weathers="foo") # in the Build action for spec3 reverse_dag = ac.tree.reverse_graph() for uid, action in ac.tree: if uid.endswith("spec12.build"): assert ac.tree.get_tag(uid) cdist, cuid, ctag = reverse_dag.get_context(uid)[0] assert cuid == uid assert ctag["plan_args"]["weathers"] == "foo" assert ctag["plan_line"] == "plan.txt:2" elif uid.endswith("spec3.build"): assert not ac.tree.get_tag(uid) cdist, cuid, ctag = reverse_dag.get_context(uid)[0] assert cuid != uid assert cuid.endswith("spec10.build") assert ctag["plan_args"]["weathers"] == "foo" assert ctag["plan_line"] == "plan.txt:3" elif uid.endswith("spec11.build"): assert ac.tree.get_tag(uid), ac.tree.tags cdist, cuid, ctag = reverse_dag.get_context(uid)[0] assert cuid == uid assert ctag["plan_args"]["weathers"] == "bar" assert ctag["plan_line"] == "plan.txt:4" # Also verify that the instance deps is properly loaded assert set(action.anod_instance.deps.keys()) == {"spec1"} assert action.anod_instance.deps[ "spec1"].__class__.__name__ == "Spec1" # Also test that we are still able to extract the values # after having scheduled the action graph. 
# Create an explict build action to make sure that the plan can be # scheduled ac.add_anod_action( name="spec3", env=current_env, primitive="build", plan_line="plan.txt:5", plan_args={"weathers": "my_spec3_weather"}, ) sched_dag = ac.schedule(ac.always_download_source_resolver) sched_rev = sched_dag.reverse_graph() for uid, action in sched_dag: if uid.endswith("spec12.build"): assert sched_dag.get_tag(uid) # Also verify that the instance deps is properly loaded assert set( action.anod_instance.deps.keys()) == {"spec1", "spec11"} assert (action.anod_instance.deps["spec11"].__class__.__name__ == "Spec11") assert action.anod_instance.deps[ "spec1"].__class__.__name__ == "Spec1" elif uid.endswith("spec3.build"): assert sched_dag.get_tag(uid) assert (sched_rev.get_context(uid)[0][2]["plan_args"] ["weathers"] == "my_spec3_weather")
def test_dag_2_plan_sources(self):
    """Check that we can extract values from plan in final dag.

    Use a scheduler to always create source and ask for a source
    package creation.
    """
    # Fresh anod context and a plan context bound to the local env
    ac = self.create_context()
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature.  Parameter names
    # must match the keyword arguments used in plan files.
    def anod_action(module, build=None, default_build=False, host=None,
                    target=None, board=None, weathers=None,
                    product_version=None, when_missing=None,
                    manual_action=False, qualifier=None, jobs=None,
                    releases=None, process_suffix=None, update_vcs=False,
                    recursive=None, query_range=None,
                    force_repackage=False):
        pass

    cm.register_action('anod_source', anod_action)

    # Create a simple plan with a single anod_source entry
    plan_lines = [u'def myserver():',
                  u' anod_source("spec1", weathers="foo")']
    with open('plan.txt', 'w') as fd:
        fd.write('\n'.join(plan_lines))
    loaded_plan = plan.Plan({})
    loaded_plan.load('plan.txt')

    # Execute the plan and create anod actions
    for plan_action in cm.execute(loaded_plan, 'myserver'):
        ac.add_anod_action(
            name=plan_action.module,
            env=(current_env if plan_action.default_build
                 else plan_action),
            primitive=plan_action.action.replace('anod_', '', 1),
            qualifier=plan_action.qualifier,
            plan_line=plan_action.plan_line,
            plan_args=plan_action.plan_args)

    # The root "sources" node carries the plan tag; the per-source
    # node does not, but its immediate predecessor context does.
    for uid, action in ac.tree:
        if uid.endswith('sources'):
            assert ac.tree.get_tag(uid)
        elif uid.endswith('spec1-src'):
            assert ac.tree.get_tag(uid) is None
            context = ac.tree.get_context(vertex_id=uid,
                                          reverse_order=True,
                                          max_distance=1)
            assert context[0][2]['plan_args']['weathers'] == 'foo'
def test_duplicated_lines(self, reject_duplicates):
    """Check that duplicated lines in plan are properly rejected.

    :param reject_duplicates: whether the context rejects duplicated
        plan lines (pytest parametrization)
    """
    ac = self.create_context(reject_duplicates=reject_duplicates)
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature.  Parameter names
    # must match the keyword arguments used in plan files.
    def anod_action(module, build=None, default_build=False, host=None,
                    target=None, board=None, weathers=None,
                    product_version=None, when_missing=None,
                    manual_action=False, qualifier=None, jobs=None,
                    releases=None, process_suffix=None, update_vcs=False,
                    recursive=None, query_range=None,
                    force_repackage=False):
        pass

    cm.register_action('anod_build', anod_action)

    # Create a simple plan containing the same anod_build call twice
    content = [u'def myserver():',
               u' anod_build("spec3", weathers="A")',
               u' anod_build("spec3", weathers="B")']
    with open('plan.txt', 'w') as f:
        f.write('\n'.join(content))
    myplan = plan.Plan({})
    myplan.load('plan.txt')

    def execute_plan():
        # Execute the plan and create anod actions (shared by both
        # branches below to avoid duplicating the loop)
        for action in cm.execute(myplan, 'myserver'):
            primitive = action.action.replace('anod_', '', 1)
            ac.add_anod_action(
                name=action.module,
                env=current_env if action.default_build else action,
                primitive=primitive,
                qualifier=action.qualifier,
                plan_line=action.plan_line,
                plan_args=action.plan_args)

    if not reject_duplicates:
        # Duplicates are accepted: the last plan line wins for the tag
        execute_plan()
        for uid, action in ac.tree:
            if uid.endswith('build'):
                assert ac.tree.get_tag(
                    uid)['plan_args']['weathers'] == 'B'
    else:
        # Duplicates are rejected: the second line must raise
        with pytest.raises(SchedulingError):
            execute_plan()
def add_spec(self, name,
             env=None,
             primitive=None,
             qualifier=None,
             source_packages=None,
             expand_build=True,
             source_name=None,
             plan_line=None,
             plan_args=None,
             force_source_deps=None):
    """Expand an anod action into a tree (internal).

    Recursively loads the spec, creates the corresponding action node
    (CreateSource(s)/Build/Test/Install), wires its dependencies into
    the DAG, and adds decision nodes (BuildOrDownload,
    CreateSourceOrDownload) where several strategies are possible.

    :param name: spec name
    :type name: str
    :param env: spec environment
    :type env: BaseEnv | None
    :param primitive: spec primitive
    :type primitive: str
    :param qualifier: qualifier
    :type qualifier: str | None
    :param source_packages: if not empty only create the specified list
        of source packages and not all source packages defined in the
        anod specification file
    :type source_packages: list[str] | None
    :param expand_build: should build primitive be expanded
    :type expand_build: bool
    :param source_name: source name associated with the source
        primitive
    :type source_name: str | None
    :param plan_line: plan line that requested this action, used to tag
        the node in the DAG (None when the call comes from dependency
        expansion rather than directly from a plan)
    :type plan_line: str | None
    :param plan_args: plan arguments associated with plan_line
    :type plan_args: dict | None
    :param force_source_deps: whether to force loading the source deps
    :type force_source_deps: bool
    :return: the action that was created (or found) for this spec;
        note that for expanded build/install primitives the returned
        node may be of a different kind than the requested primitive
    """
    def add_action(data, connect_with=None):
        # Insert a node in the DAG and optionally attach it as a
        # dependency of connect_with.
        self.add(data)
        if connect_with is not None:
            self.connect(connect_with, data)

    # Initialize a spec instance
    e3.log.debug('name:{}, qualifier:{}, primitive:{}'.format(
        name, qualifier, primitive))
    spec = self.load(name, qualifier=qualifier, env=env, kind=primitive)

    # Initialize the resulting action based on the primitive name
    if primitive == 'source':
        if source_name is not None:
            result = CreateSource(spec, source_name)
        else:
            # Create the root node
            result = CreateSources(spec)
            # A consequence of calling add_action here
            # will result in skipping dependencies parsing.
            add_action(result)
            # Then one node for each source package
            for sb in spec.source_pkg_build:
                if source_packages and sb not in source_packages:
                    # This source package is defined in the spec but
                    # explicitly excluded in the plan
                    continue
                if isinstance(sb, UnmanagedSourceBuilder):
                    # do not create source package for unmanaged source
                    continue
                sub_result = self.add_spec(name=name,
                                           env=env,
                                           primitive='source',
                                           source_name=sb.name,
                                           plan_line=plan_line,
                                           plan_args=None)
                self.connect(result, sub_result)
    elif primitive == 'build':
        result = Build(spec)
    elif primitive == 'test':
        result = Test(spec)
    elif primitive == 'install':
        result = Install(spec)
    else:
        # defensive code
        raise ValueError('add_spec error: %s is not known' % primitive)

    # If this action is directly linked with a plan line make sure
    # to register the link between the action and the plan even
    # if the action has already been added via another dependency
    if plan_line is not None and plan_args is not None:
        self.link_to_plan(vertex_id=result.uid,
                          plan_line=plan_line,
                          plan_args=plan_args)

    if primitive == 'install' and \
            not (spec.has_package and spec.component is not None) and \
            has_primitive(spec, 'build'):
        # Case in which we have an install dependency but no install
        # primitive. In that case the real dependency is a build tree
        # dependency. In case there is no build primitive and no
        # package keep the install primitive (usually this means there
        # is an overloaded download procedure).
        return self.add_spec(name, env, 'build', qualifier,
                             expand_build=False,
                             plan_args=plan_args,
                             plan_line=plan_line,
                             force_source_deps=force_source_deps)

    if expand_build and primitive == 'build' and \
            (spec.has_package and spec.component is not None):
        # A build primitive is required and the spec defined a binary
        # package. In that case the implicit post action of the build
        # will be a call to the install primitive
        return self.add_spec(name, env, 'install', qualifier,
                             plan_args=None,
                             plan_line=plan_line,
                             force_source_deps=force_source_deps)

    # Add this stage if the action is already in the DAG, then it has
    # already been added.
    if result in self:
        return result

    if not has_primitive(spec, primitive):
        raise SchedulingError('spec %s does not support primitive %s'
                              % (name, primitive))

    # Add the action in the DAG
    add_action(result)

    if primitive == 'install':
        # Expand an install node to
        #   install --> decision --> build
        #                        \-> download binary
        download_action = DownloadBinary(spec)
        add_action(download_action)

        if has_primitive(spec, 'build'):
            build_action = self.add_spec(
                name=name,
                env=env,
                primitive='build',
                qualifier=qualifier,
                expand_build=False,
                plan_args=None,
                plan_line=plan_line,
                force_source_deps=force_source_deps)
            self.add_decision(BuildOrDownload,
                              result,
                              build_action,
                              download_action)
        else:
            # No build primitive: download is the only way to install
            self.connect(result, download_action)

    elif primitive == 'source':
        if source_name is not None:
            # Attach the checkouts needed to build this source package
            for sb in spec.source_pkg_build:
                if sb.name == source_name:
                    for checkout in sb.checkout:
                        if checkout not in self.repo.repos:
                            logger.warning('unknown repository %s',
                                           checkout)
                        co = Checkout(checkout,
                                      self.repo.repos.get(checkout))
                        add_action(co, result)

    # Look for dependencies
    spec_dependencies = []
    if '%s_deps' % primitive in dir(spec) and \
            getattr(spec, '%s_deps' % primitive) is not None:
        spec_dependencies += getattr(spec, '%s_deps' % primitive)

    if force_source_deps and primitive != 'source':
        # Also consider the source dependencies of the spec itself
        if 'source_deps' in dir(spec) and \
                getattr(spec, 'source_deps') is not None:
            spec_dependencies += getattr(spec, 'source_deps')

    # NOTE: the list is extended while being iterated (see the
    # force_source_deps branch below); statement order matters here.
    for e in spec_dependencies:
        if isinstance(e, Dependency):
            if e.kind == 'source':
                # A source dependency does not create a new node but
                # ensure that sources associated with it are available
                child_instance = self.load(e.name,
                                           kind='source',
                                           env=BaseEnv(),
                                           qualifier=None)
                spec.deps[e.local_name] = child_instance

                if force_source_deps:
                    # When in force_source_deps we also want to add
                    # source_deps of all "source_pkg" dependencies.
                    if 'source_deps' in dir(child_instance) and \
                            getattr(child_instance,
                                    'source_deps') is not None:
                        spec_dependencies += child_instance.source_deps
                continue

            child_action = self.add_spec(
                name=e.name,
                env=e.env(spec, self.default_env),
                primitive=e.kind,
                qualifier=e.qualifier,
                plan_args=None,
                plan_line=plan_line,
                force_source_deps=force_source_deps)

            spec.deps[e.local_name] = child_action.anod_instance

            if e.kind == 'build' and \
                    self[child_action.uid].data.kind == 'install':
                # We have a build tree dependency that produced a
                # subtree starting with an install node. In that case
                # we expect the user to choose BUILD as decision.
                dec = self.predecessors(child_action)[0]
                if isinstance(dec, BuildOrDownload):
                    dec.add_trigger(
                        result,
                        BuildOrDownload.BUILD,
                        plan_line if plan_line is not None
                        else 'unknown line')

            # Connect child dependency
            self.connect(result, child_action)

    # Look for source dependencies (i.e sources needed)
    if '%s_source_list' % primitive in dir(spec):
        source_list = getattr(spec, '{}_source_list'.format(primitive))
        for s in source_list:
            # set source builder
            if s.name in self.sources:
                s.set_builder(self.sources[s.name])
            # set other sources to compute source ignore
            s.set_other_sources(source_list)
            # add source install node
            src_install_uid = result.uid.rsplit('.', 1)[0] + \
                '.source_install.' + s.name
            src_install_action = InstallSource(src_install_uid, spec, s)
            add_action(src_install_action, connect_with=result)

            # Then add nodes to create that source (download or creation
            # using anod source and checkouts)
            if s.name in self.sources:
                spec_decl, obj = self.sources[s.name]
            else:
                raise AnodError(
                    origin='expand_spec',
                    message='source %s does not exist '
                            '(referenced by %s)' % (s.name, result.uid))

            src_get_action = GetSource(obj)
            if src_get_action in self:
                # Source already handled: just connect and move on
                self.connect(src_install_action, src_get_action)
                continue

            add_action(src_get_action,
                       connect_with=src_install_action)

            src_download_action = DownloadSource(obj)
            add_action(src_download_action)

            if isinstance(obj, UnmanagedSourceBuilder):
                # In that case only download is available
                self.connect(src_get_action, src_download_action)
            else:
                source_action = self.add_spec(name=spec_decl,
                                              env=BaseEnv(),
                                              primitive='source',
                                              plan_args=None,
                                              plan_line=plan_line,
                                              source_name=s.name)
                for repo in obj.checkout:
                    r = Checkout(repo, self.repo.repos.get(repo))
                    add_action(r, connect_with=source_action)
                self.add_decision(CreateSourceOrDownload,
                                  src_get_action,
                                  source_action,
                                  src_download_action)
    return result