def test_force_download_before_install(self):
    """Test two deps on the same spec with installation and download.

    Same as test_force_download_after_install but in a different order.
    The end result should be the same.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action("spec_download_dep", env=ac.default_env, primitive="build")
    result = ac.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_download_dep.build",
        "mylinux.x86_64-linux.spec_build.install",
        "mylinux.x86_64-linux.spec_build.download_bin",
    }
    ac.add_anod_action("spec_install_dep", env=ac.default_env, primitive="build")
    ac.schedule(ac.always_create_source_resolver)
    result = ac.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_install_dep.build",
        "mylinux.x86_64-linux.spec_download_dep.build",
        "mylinux.x86_64-linux.spec_build.install",
        "mylinux.x86_64-linux.spec_build.download_bin",
    }
def test_force_download_after_install(self):
    """Test two deps on the same spec with installation and download.

    Here we have two specs having an "installation" and a "download"
    dependency on the same spec (spec_build). When the two are set
    together the scheduler finds the proper solution: download.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action("spec_install_dep", env=ac.default_env, primitive="build")
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    assert "This plan resolver cannot decide" in str(err)
    ac.add_anod_action("spec_download_dep", env=ac.default_env, primitive="build")
    result = ac.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_install_dep.build",
        "mylinux.x86_64-linux.spec_download_dep.build",
        "mylinux.x86_64-linux.spec_build.install",
        "mylinux.x86_64-linux.spec_build.download_bin",
    }
def test_force_download_after_build(self):
    """Test two deps on the same spec with build and download.

    Here we have two specs having a "build_tree" and a "download"
    dependency on the same spec (spec_build). When the two are set
    together the scheduler cannot find a solution.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action("spec_build_dep", env=ac.default_env, primitive="build")

    # Verify that, when scheduling this plan, the scheduler asks for
    # an explicit build
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    assert "A spec in the plan has a build_tree dependency on spec_build" in str(err)

    # Verify that after adding a download dep, the scheduler now
    # warns that it cannot resolve the plan
    ac.add_anod_action("spec_download_dep", env=ac.default_env, primitive="build")
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    assert "explicit DownloadBinary decision made" in str(err)
def env(self, parent, default_env):
    """Retrieve env for the dependency.

    :param parent: Anod instance in which the dep was declared
    :type parent: Anod
    :param default_env: default env for the current context
    :type default_env: BaseEnv
    :return: env object that should be used by the dependency
    :rtype: BaseEnv
    """
    # Get the current environment associated with the Anod instance
    # and adjust it based on dependency parameters
    dep_env = BaseEnv(parent.env.build, parent.env.host, parent.env.target)

    # For simulation purposes we sometimes load specs as if they were
    # loaded on a non-local machine, thus 'default' does not correspond
    # to the default build platform of the local machine.
    if self.build == "default":
        build = default_env.build.platform
    else:
        build = self.build
    if self.host == "default":
        host = default_env.build.platform
    else:
        host = self.host
    if self.target == "default":
        target = default_env.build.platform
    else:
        target = self.target
    dep_env.set_env(build, host, target)
    return dep_env
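# A small illustrative sketch (not part of the library; the helper name
# "resolve_default" is made up) of the substitution performed above: any of
# build/host/target equal to "default" is replaced by the build platform of
# default_env, while any other value is kept as-is.
from e3.env import BaseEnv


def resolve_default(value, default_env):
    return default_env.build.platform if value == "default" else value


sim_env = BaseEnv()
sim_env.set_build("x86_64-linux", "rhes8", "mylinux")
assert resolve_default("default", sim_env) == "x86_64-linux"
assert resolve_default("arm-elf", sim_env) == "arm-elf"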
def __init__(self, stack=None, plan=None, server=None, build=None,
             host=None, target=None, **kwargs):
    """Initialize an execution context or a scope.

    :param stack: stack of BaseEnv objects that keeps track of scopes.
        Used only internally
    :type stack: list[BaseEnv]
    :param server: a BaseEnv object that represents the host default env.
        The server parameter is taken into account only during creation of
        the initial context.
    :type server: BaseEnv
    :param build: see e3.env.BaseEnv.set_env
    :type build: str | None
    :param host: see e3.env.BaseEnv.set_env
    :type host: str | None
    :param target: see e3.env.BaseEnv.set_env
    :type target: str | None
    :param kwargs: additional data for the current scope/context
    :type kwargs: dict
    """
    if stack:
        # This is a scope creation, so copy current env
        self.stack = stack
        self.plan = plan
        new = self.stack[-1].copy(build=build, host=host, target=target)
    else:
        self.stack = []
        if server is not None:
            # This is a new context
            new = server.copy(build, host, target)
        else:
            # This is a new context with no server information. In that
            # case retrieve defaults for the local machine.
            new = BaseEnv()
            new.set_env(build, host, target)

    # Store additional data
    for k, v in kwargs.iteritems():
        setattr(new, k, v)

    # And push on the stack
    self.stack.append(new)

    # Registered functions that correspond to actions. Note that
    # there is no need to propagate registered actions to children
    # scopes. Only the initial context uses them. The overall scheme
    # also works because all scopes refer to the same stack of env
    # (because the object is mutable).
    self.actions = {}
    self.action_list = []
def test_context_init(self):
    # Create a context using:
    # 1. the local default configuration
    # 2. forcing a x86-linux configuration
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr)
    assert ac.default_env.build == BaseEnv().build
    assert ac.default_env.host == BaseEnv().host
    assert ac.default_env.target == BaseEnv().target
    self.create_context()
def create_context(self):
    # Create a context for a x86-linux machine
    asr = AnodSpecRepository(self.spec_dir)
    asr.repos['spec1-git'] = 'spec1-git'
    asr.repos['spec8-git'] = 'spec8-git'
    asr.repos['spec2-git'] = 'spec2-git'
    env = BaseEnv()
    env.set_build('x86-linux', 'rhes6', 'mylinux')
    ac = AnodContext(asr, default_env=env)
    return ac
def __init__(self, stack=None, plan=None, server=None, build=None,
             host=None, target=None, **kwargs):
    """Initialize an execution context or a scope.

    :param stack: stack of BaseEnv objects that keeps track of scopes.
        Used only internally
    :type stack: list[BaseEnv]
    :param server: a BaseEnv object that represents the host default env.
        The server parameter is taken into account only during creation of
        the initial context.
    :type server: BaseEnv
    :param build: see e3.env.BaseEnv.set_env
    :type build: str | None
    :param host: see e3.env.BaseEnv.set_env
    :type host: str | None
    :param target: see e3.env.BaseEnv.set_env
    :type target: str | None
    :param kwargs: additional data for the current scope/context
    :type kwargs: dict
    """
    if stack:
        # This is a scope creation, so copy current env
        self.stack = stack
        self.plan = plan
        new = self.stack[-1].copy(build=build, host=host, target=target)
    else:
        self.stack = []
        if server is not None:
            # This is a new context
            new = server.copy(build, host, target)
        else:
            # This is a new context with no server information. In that
            # case retrieve defaults for the local machine.
            new = BaseEnv()
            new.set_env(build, host, target)

    # Store additional data
    for k, v in kwargs.iteritems():
        setattr(new, k, v)

    # And push on the stack
    self.stack.append(new)

    # Registered functions that correspond to actions. Note that
    # there is no need to propagate registered actions to children
    # scopes. Only the initial context uses them. The overall scheme
    # also works because all scopes refer to the same stack of env
    # (because the object is mutable).
    self.actions = {}
def __init__(
    self, hostname: str, platform: str, version: str, **kwargs: Any
) -> None:
    """Initialize a host entry.

    :param hostname: host name
    :param platform: platform name (see e3.platform)
    :param version: platform version (usually OS version)
    :param kwargs: additional user defined data. Each key from the data
        dict is accessible like a regular attribute.
    """
    BaseEnv.__init__(self)
    self.set_build(name=str(platform), version=str(version), machine=str(hostname))
    self._instance.update(kwargs)
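# A hypothetical usage sketch for the constructor above. The class name
# "Host" is an assumption (the snippet only shows its __init__); the class is
# redefined locally so the example is self-contained.
from typing import Any

from e3.env import BaseEnv


class Host(BaseEnv):
    def __init__(
        self, hostname: str, platform: str, version: str, **kwargs: Any
    ) -> None:
        BaseEnv.__init__(self)
        self.set_build(name=str(platform), version=str(version), machine=str(hostname))
        self._instance.update(kwargs)


h = Host("myhost", "x86_64-linux", "rhes8", data_center="dc1")
assert h.build.platform == "x86_64-linux"
# Per the docstring, extra keyword data is readable like a regular attribute
assert h.data_center == "dc1"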
def do_printenv(m: Main, set_prog: bool = True) -> int:
    """Print the environment for the given spec."""
    if set_prog:
        m.argument_parser.prog = m.argument_parser.prog + " printenv"
    m.argument_parser.add_argument(
        "spec_name",
        help="spec to build. This is "
        "the basename of an .anod file (without the extension)",
    )
    m.argument_parser.add_argument("--qualifier", help="optional qualifier")
    m.argument_parser.add_argument(
        "--sandbox-dir",
        help="directory in which build artifacts are stored",
        default=SBX_DIR,
    )
    m.argument_parser.add_argument(
        "--build-env",
        help="print build environment",
        action="store_true",
        default=False,
    )
    m.parse_args()

    # Disable logging messages except errors
    logging.getLogger("").setLevel(logging.ERROR)

    check_common_tools()

    ac = create_anod_context(SPEC_DIR)
    sbx = create_anod_sandbox(m.args.sandbox_dir, SPEC_DIR)

    anod_instance = ac.add_anod_action(
        name=m.args.spec_name,
        primitive="build",
        qualifier=m.args.qualifier,
        sandbox=sbx,
        upload=False,
        env=BaseEnv.from_env(),
    ).anod_instance

    saved_env = {k: v for k, v in os.environ.items()}
    if m.args.build_env:
        if hasattr(anod_instance, "build_setenv"):
            anod_instance.build_setenv()
    else:
        if hasattr(anod_instance, "setenv"):
            anod_instance.setenv()

    for var, value in os.environ.items():
        if var not in saved_env or saved_env[var] != os.environ[var]:
            print('export %s="%s";' % (var, value))
            if m.args.verbose >= 1:
                print('printf "I set %s=\\"%s\\"\\n\\n";' % (var, value))
    print(" ")
    print(BANNER % m.args.spec_name)
    return 0
def add_plan_action(
    self, plan_action_env: PlanActionEnv, sandbox: Optional[SandBox] = None
) -> Optional[Action]:
    """Add an Anod action to the context.

    :param plan_action_env: the PlanActionEnv object as returned by
        PlanContext
    :param sandbox: the SandBox object that will be used to run commands
    :return: the root added action or None if this is not an anod action
    """
    action_name = plan_action_env.action
    if not action_name.startswith("anod_") or plan_action_env.module is None:
        return None

    primitive = action_name.replace("anod_", "", 1)
    if (
        primitive != "build"
        and primitive != "install"
        and primitive != "test"
        and primitive != "source"
    ):
        logger.warning(f"Unknown primitive {primitive}")
        return None
    elif TYPE_CHECKING:
        primitive = cast(PRIMITIVE, primitive)

    return self.add_anod_action(
        name=plan_action_env.module,
        env=self.default_env
        if plan_action_env.default_build
        else BaseEnv.from_env(plan_action_env),
        primitive=primitive,
        qualifier=plan_action_env.qualifier,
        source_packages=plan_action_env.source_packages,
        upload=plan_action_env.push_to_store,
        plan_line=plan_action_env.plan_line,
        plan_args=plan_action_env.plan_args,
        sandbox=sandbox,
    )
def __init__(self, spec_repository, default_env=None, reject_duplicates=False):
    """Initialize a new context.

    :param spec_repository: an Anod repository
    :type spec_repository: e3.anod.loader.AnodSpecRepository
    :param default_env: an env that should be considered as the
        default for the current context. Mainly useful to simulate
        another server context. If None then we assume that the
        context is the local server
    :type default_env: BaseEnv | None
    :param reject_duplicates: if True, raise SchedulingError when two
        duplicated actions are generated
    :type reject_duplicates: bool
    """
    self.repo = spec_repository
    if default_env is None:
        self.default_env = BaseEnv()
    else:
        self.default_env = default_env.copy()
    self.reject_duplicates = reject_duplicates

    self.tree = DAG()
    self.root = Root()
    self.dependencies = {}
    self.add(self.root)
    self.cache = {}
    self.sources = {}
def __init__(
    self,
    spec_repository: AnodSpecRepository,
    default_env: Optional[BaseEnv] = None,
    reject_duplicates: bool = False,
):
    """Initialize a new context.

    :param spec_repository: an Anod repository
    :param default_env: an env that should be considered as the
        default for the current context. Mainly useful to simulate
        another server context. If None then we assume that the
        context is the local server
    :param reject_duplicates: if True, raise SchedulingError when two
        duplicated actions are generated
    """
    self.repo = spec_repository
    if default_env is None:
        self.default_env = BaseEnv()
    else:
        self.default_env = default_env.copy()
    self.reject_duplicates = reject_duplicates

    self.tree = DAG()
    self.root = Root()
    self.dependencies: Dict[str, Dict[str, Tuple[Dependency, Anod]]] = {}
    self.add(self.root)
    self.cache: Dict[CacheKeyType, Anod] = {}
    self.sources: Dict[str, Tuple[str, SourceBuilder]] = {}
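# A minimal usage sketch mirroring the tests in this file: build a context for
# a simulated x86_64-linux server, add one build action and schedule it.
# "spec_build" and spec_dir are placeholders for a real spec name / directory.
from e3.anod.context import AnodContext
from e3.anod.loader import AnodSpecRepository
from e3.env import BaseEnv

spec_dir = "path/to/specs"  # placeholder: directory containing .anod files

env = BaseEnv()
env.set_build("x86_64-linux", "rhes8", "mylinux")
asr = AnodSpecRepository(spec_dir)
ac = AnodContext(asr, default_env=env, reject_duplicates=True)
ac.add_anod_action("spec_build", env=ac.default_env, primitive="build")
dag = ac.schedule(ac.always_create_source_resolver)
print(sorted(dag.vertex_data.keys()))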
def add_plan_action(
    self, plan_action_env: PlanActionEnv, sandbox: Optional[SandBox] = None
) -> Optional[Action]:
    """Add an Anod action to the context.

    :param plan_action_env: the PlanActionEnv object as returned by
        PlanContext
    :param sandbox: the SandBox object that will be used to run commands
    :return: the root added action or None if this is not an anod action
    """
    action_name = plan_action_env.action
    if not action_name.startswith("anod_") or plan_action_env.module is None:
        return None

    primitive = action_name.replace("anod_", "", 1)
    return self.add_anod_action(
        name=plan_action_env.module,
        env=self.default_env
        if plan_action_env.default_build
        else BaseEnv.from_env(plan_action_env),
        primitive=primitive,
        qualifier=plan_action_env.qualifier,
        source_packages=plan_action_env.source_packages,
        upload=plan_action_env.push_to_store,
        plan_line=plan_action_env.plan_line,
        plan_args=plan_action_env.plan_args,
        sandbox=sandbox,
    )
def test_dag_2_plan_sources(self):
    """Check that we can extract values from plan in final dag.

    Use a scheduler to always create source and ask for a source
    package creation.
    """
    # Create a new plan context
    ac = self.create_context()
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature
    def anod_action(
        module,
        build=None,
        default_build=False,
        host=None,
        target=None,
        board=None,
        weathers=None,
        product_version=None,
        when_missing=None,
        manual_action=False,
        qualifier=None,
        jobs=None,
        releases=None,
        process_suffix=None,
        update_vcs=False,
        recursive=None,
        query_range=None,
        force_repackage=False,
    ):
        pass

    cm.register_action("anod_source", anod_action)

    # Create a simple plan
    content = ["def myserver():", ' anod_source("spec1", weathers="foo")']
    with open("plan.txt", "w") as f:
        f.write("\n".join(content))
    myplan = plan.Plan({})
    myplan.load("plan.txt")

    # Execute the plan and create anod actions
    for action in cm.execute(myplan, "myserver"):
        ac.add_plan_action(action)

    for uid, _ in ac.tree:
        if uid.endswith("sources"):
            assert ac.tree.get_tag(uid)
        elif uid.endswith(".source.spec1-src"):
            assert ac.tree.get_tag(uid)
            assert (
                ac.tree.get_context(
                    vertex_id=uid, reverse_order=True, max_distance=1
                )[0][2]["plan_args"]["weathers"]
                == "foo"
            )
def do_build(m: Main, set_prog: bool = True) -> int:
    """Perform the build."""
    if set_prog:
        m.argument_parser.prog = m.argument_parser.prog + " build"
    m.argument_parser.add_argument(
        "spec_name",
        help="spec to build. This is "
        "the basename of an .anod file (without the extension)",
    )
    m.argument_parser.add_argument("--qualifier", help="optional qualifier")
    m.argument_parser.add_argument(
        "--sandbox-dir",
        help="directory in which build artefacts are stored",
        default=SBX_DIR,
    )
    m.argument_parser.add_argument(
        "--force",
        help="force rebuild of everything",
        action="store_true",
        default=False,
    )
    m.parse_args()

    check_common_tools()

    ac = create_anod_context(SPEC_DIR)
    sbx = create_anod_sandbox(m.args.sandbox_dir, SPEC_DIR)
    sbx.create_dirs()

    ac.add_anod_action(
        name=m.args.spec_name,
        primitive="build",
        qualifier=m.args.qualifier,
        sandbox=sbx,
        upload=False,
        env=BaseEnv.from_env(),
    )
    actions = ac.schedule(resolver=ac.always_create_source_resolver)
    walker = UxasBuilder(actions, sandbox=sbx, force=m.args.force)

    # TODO: something with walker.job_status['root'], assuming we can get a
    # useful value there. Right now, it's always 'unknown'
    #
    # In the meantime, python > 3.6 guarantees the order of keys in a dict.
    # The job_status dict has as its penultimate entry the thing we asked to
    # build or the last thing that failed (the last non-root node). It's ugly,
    # but _should_ be safe to use this, until we have resolution for root
    # always reporting unknown.
    result: ReturnValue = list(walker.job_status.values())[-2]
    if result in BUILD_SUCCESS:
        return 0
    else:
        return result.value
def test_force_download_before_build(self):
    """Test two deps on the same spec with build and download.

    Same as test_force_download_after_build but in a different order.
    The expected result is the same: an error should be raised.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action("spec_download_dep", env=ac.default_env, primitive="build")
    ac.add_anod_action("spec_build_dep", env=ac.default_env, primitive="build")
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    assert "explicit DownloadBinary decision made" in str(err)
def create_context(self, reject_duplicates: bool = True) -> AnodContext:
    """Create a spec repository and anod context.

    :param reject_duplicates: whether to reject duplicates in plan
    """

    def repo_conf(name: str) -> dict[str, str]:
        return {"vcs": "git", "url": name, "branch": "master"}

    # Create a context for a x86-linux machine
    asr = AnodSpecRepository(self.spec_dir)
    asr.repos["spec1-git"] = repo_conf("spec1")
    asr.repos["spec8-git"] = repo_conf("spec8")
    asr.repos["spec2-git"] = repo_conf("spec2")
    asr.repos["a-git"] = repo_conf("a")
    env = BaseEnv()
    env.set_build("x86-linux", "rhes6", "mylinux")
    ac = AnodContext(asr, default_env=env, reject_duplicates=reject_duplicates)
    return ac
def create_context(self, reject_duplicates=True):
    """Create a spec repository and anod context.

    :param reject_duplicates: whether to reject duplicates in plan
    :type reject_duplicates: bool
    :rtype: AnodContext
    """
    # Create a context for a x86-linux machine
    asr = AnodSpecRepository(self.spec_dir)
    asr.repos['spec1-git'] = 'spec1-git'
    asr.repos['spec8-git'] = 'spec8-git'
    asr.repos['spec2-git'] = 'spec2-git'
    env = BaseEnv()
    env.set_build('x86-linux', 'rhes6', 'mylinux')
    ac = AnodContext(asr, default_env=env,
                     reject_duplicates=reject_duplicates)
    return ac
def test_duplicated_lines(self, reject_duplicates):
    """Check that duplicated lines in plan are properly rejected."""
    ac = self.create_context(reject_duplicates=reject_duplicates)
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature
    def anod_action(
        module,
        build=None,
        default_build=False,
        host=None,
        target=None,
        board=None,
        weathers=None,
        product_version=None,
        when_missing=None,
        manual_action=False,
        qualifier=None,
        jobs=None,
        releases=None,
        process_suffix=None,
        update_vcs=False,
        recursive=None,
        query_range=None,
        force_repackage=False,
    ):
        pass

    cm.register_action("anod_build", anod_action)

    # Create a simple plan
    content = [
        "def myserver():",
        ' anod_build("spec3", weathers="A")',
        ' anod_build("spec3", weathers="B")',
    ]
    with open("plan.plan", "w") as f:
        f.write("\n".join(content))
    myplan = plan.Plan({})
    myplan.load("plan.plan")

    if not reject_duplicates:
        # Execute the plan and create anod actions
        for action in cm.execute(myplan, "myserver"):
            ac.add_plan_action(action)
        for uid, _ in ac.tree:
            if uid.endswith("build"):
                assert ac.tree.get_tag(uid)["plan_args"]["weathers"] == "B"
    else:
        with pytest.raises(SchedulingError):
            for action in cm.execute(myplan, "myserver"):
                ac.add_plan_action(action)
def test_force_download_without_require_condition(self):
    """Test that the force download can be done thanks to require=xxx.

    A require condition can be added to the build primitive to disable
    the build primitive for some qualifiers.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)

    # We start with a dependency on spec_nobuild where the build primitive
    # require condition is True
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action(
        "spec_nobuild_dep",
        env=ac.default_env,
        primitive="build",
    )

    # If both build and install are allowed the resolver will
    # complain and ask for an explicit choice
    with pytest.raises(SchedulingError) as err:
        ac.schedule(ac.always_create_source_resolver)
    assert "what to do for resolving" in str(err)

    # Now with a dependency making require return False, and so
    # disabling the build primitive, the resolver will not have any
    # conflict: the only allowed action will be download.
    ac2 = AnodContext(asr, default_env=env)
    ac2.add_anod_action(
        "spec_nobuild_stable_dep",
        env=ac.default_env,
        primitive="build",
    )
    result = ac2.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_nobuild.download_bin",
        "mylinux.x86_64-linux.spec_nobuild_stable_dep.build",
        "mylinux.x86_64-linux.spec_nobuild.install",
    }
def test_plan_call_args(self):
    """Retrieve call args values."""
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature
    def plan_action(platform):
        pass

    cm.register_action('plan_action', plan_action)

    # Create a simple plan
    content = [u'def myserver():', u' plan_action("any")']
    with open('plan.txt', 'w') as f:
        f.write('\n'.join(content))
    myplan = plan.Plan({})
    myplan.load('plan.txt')

    for action in cm.execute(myplan, 'myserver'):
        assert action.plan_call_args == {'platform': 'any'}
        assert action.plan_args['platform'] == BaseEnv().platform
def test_plan_call_args(self):
    """Retrieve call args values."""
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature
    def plan_action(platform):
        pass

    cm.register_action("plan_action", plan_action)

    # Create a simple plan
    content = ["def myserver():", ' plan_action("any")']
    with open("plan.txt", "w") as f:
        f.write("\n".join(content))
    myplan = plan.Plan({})
    myplan.load("plan.txt")

    for action in cm.execute(myplan, "myserver"):
        assert action.plan_call_args == {"platform": "any"}
        assert action.plan_args["platform"] == BaseEnv().platform
def __init__(self, hostname, platform, version, **kwargs):
    """Initialize a host entry.

    :param hostname: host name
    :type hostname: str
    :param platform: platform name (see e3.platform)
    :type platform: str
    :param version: platform version (usually OS version)
    :type version: str
    :param kwargs: additional user defined data. Each key from the data
        dict is accessible like a regular attribute.
    :type kwargs: dict
    """
    BaseEnv.__init__(self)
    self.set_build(name=str(platform), version=str(version), machine=str(hostname))
    self._instance.update(kwargs)
def test_force_download_without_download_primitive(self):
    """Test that the force download does not require the download primitive.

    Having a download() primitive or not should not impact this feature.
    """
    env = BaseEnv()
    env.set_build("x86_64-linux", "rhes8", "mylinux")
    asr = AnodSpecRepository(self.spec_dir)
    ac = AnodContext(asr, default_env=env)
    ac.add_anod_action(
        "spec_download_dep_for_nodownloadprimitive",
        env=ac.default_env,
        primitive="build",
    )
    result = ac.schedule(ac.always_create_source_resolver)
    assert set(result.vertex_data.keys()) == {
        "root",
        "mylinux.x86_64-linux.spec_download_dep_for_nodownloadprimitive.build",
        "mylinux.x86_64-linux.spec_nodownloadprimitive.install",
        "mylinux.x86_64-linux.spec_nodownloadprimitive.download_bin",
    }
def activate(self, sandbox: SandBox, spec_repository: AnodSpecRepository) -> None:
    self.anod_instance.bind_to_sandbox(sandbox)
    self.anod_instance.log = e3.log.getLogger("spec." + self.anod_instance.uid)

    for e in getattr(self.anod_instance, "%s_deps" % self.anod_instance.kind, ()):
        if isinstance(e, self.anod_instance.Dependency):
            dep_class = spec_repository.load(e.name)
            dep_instance = dep_class(
                qualifier=e.qualifier,
                kind=e.kind,
                env=e.env(self.anod_instance, BaseEnv.from_env()),
            )
            self.anod_instance.deps[e.local_name] = dep_instance
    e3.log.debug("activating spec %s", self.anod_instance.uid)
def env(self, parent, default_env=None):
    """Retrieve env for the dependency.

    :param parent: Anod instance in which the dep was declared
    :type parent: Anod
    :param default_env: default env for the current context
    :type default_env: BaseEnv | None
    :return: env object that should be used by the dependency
    :rtype: BaseEnv
    """
    # Get the current environment associated with the Anod instance
    # and adjust it based on dependency parameters
    dep_env = BaseEnv(parent.env.build, parent.env.host, parent.env.target)

    if self.build == 'default' and default_env is not None:
        # For simulation purposes we sometimes load specs as if they were
        # loaded on a non-local machine, thus 'default' does not correspond
        # to the default build platform of the local machine.
        build = default_env.build.platform
    else:
        build = self.build
    dep_env.set_env(build, self.host, self.target)
    return dep_env
def activate(self, sandbox, spec_repository):
    self.anod_instance.build_space = sandbox.get_build_space(
        name=self.anod_instance.build_space_name,
        primitive=self.anod_instance.kind,
        platform=self.anod_instance.env.platform)
    self.anod_instance.log = e3.log.getLogger('spec.' + self.anod_instance.uid)

    for e in getattr(self.anod_instance,
                     '%s_deps' % self.anod_instance.kind, ()):
        if isinstance(e, self.anod_instance.Dependency):
            dep_class = spec_repository.load(e.name)
            dep_instance = dep_class(
                qualifier=e.qualifier,
                kind=e.kind,
                env=e.env(self.anod_instance, BaseEnv.from_env()))
            self.anod_instance.deps[e.local_name] = dep_instance
    e3.log.debug('activating spec %s', self.anod_instance.uid)
def __init__(self, spec_repository, default_env=None):
    """Initialize a new context.

    :param spec_repository: an Anod repository
    :type spec_repository: e3.anod.AnodSpecRepository
    :param default_env: an env that should be considered as the
        default for the current context. Mainly useful to simulate
        another server context. If None then we assume that the
        context is the local server
    :type default_env: BaseEnv | None
    """
    self.repo = spec_repository
    if default_env is None:
        self.default_env = BaseEnv()
    else:
        self.default_env = default_env.copy()

    self.tree = DAG()
    self.root = Root()
    self.add(self.root)
    self.cache = {}
    self.sources = {}
def run(self, args):
    sandbox = SandBox()
    sandbox.root_dir = args.sandbox
    if args.specs_dir:
        sandbox.specs_dir = args.specs_dir
    if args.create_sandbox:
        sandbox.create_dirs()
    if args.create_sandbox and args.spec_git_url:
        mkdir(sandbox.specs_dir)
        g = GitRepository(sandbox.specs_dir)
        if e3.log.default_output_stream is not None:
            g.log_stream = e3.log.default_output_stream
        g.init()
        g.update(args.spec_git_url, args.spec_git_branch, force=True)
    sandbox.dump_configuration()
    sandbox.write_scripts()

    asr = AnodSpecRepository(sandbox.specs_dir)
    check_api_version(asr.api_version)

    # Load plan content if needed
    if args.plan:
        if not os.path.isfile(args.plan):
            raise SandBoxError(
                "plan file %s does not exist" % args.plan,
                origin="SandBoxExec.run")
        with open(args.plan, "r") as plan_fd:
            plan_content = ["def main_entry_point():"]
            plan_content += [
                " %s" % line for line in plan_fd.read().splitlines()
            ]
            plan_content = "\n".join(plan_content)

        env = BaseEnv()
        cm = PlanContext(server=env)
        store = None
        resolver = getattr(
            AnodContext,
            str(args.resolver),
            AnodContext.always_create_source_resolver,
        )
        logger.debug("Using resolver %s", resolver.__name__)

        # Declare available actions and their signature
        def anod_action(module, build=None, host=None, target=None, qualifier=None):
            pass  # all: no cover

        for a in ("anod_install", "anod_build", "anod_test"):
            cm.register_action(a, anod_action)

        # Load the plan and execute
        plan = Plan(data={})
        plan.load_chunk(plan_content)
        actions = cm.execute(plan, "main_entry_point")

        ac = AnodContext(asr, default_env=env)
        for action in actions:
            ac.add_anod_action(
                action.module,
                action,
                action.action.replace("anod_", "", 1),
                action.qualifier,
            )

        # Check if machine plan is locally schedulable
        action_list = ac.schedule(resolver)
        e = ElectrolytJobFactory(sandbox, asr, store, dry_run=args.dry_run)
        e.run(action_list)
def testsuite_main(self, args=None):
    """Main for the main testsuite script.

    :param args: command line arguments. If None use sys.argv
    :type args: list[str] | None
    """
    self.main = Main(platform_args=self.CROSS_SUPPORT)

    # Add common options
    parser = self.main.argument_parser
    parser.add_argument("-o", "--output-dir",
                        metavar="DIR",
                        default="./out",
                        help="select output dir")
    parser.add_argument("-t", "--temp-dir",
                        metavar="DIR",
                        default=Env().tmp_dir)
    parser.add_argument(
        "--max-consecutive-failures",
        default=0,
        help="If there are more than N consecutive failures, the testsuite"
        " is aborted. If set to 0 (default) then the testsuite will never"
        " be stopped")
    parser.add_argument(
        "--keep-old-output-dir",
        default=False,
        action="store_true",
        help="This is default with this testsuite framework. The option"
        " is kept only to keep backward compatibility of invocation with"
        " former framework (gnatpython.testdriver)")
    parser.add_argument("--disable-cleanup",
                        dest="enable_cleanup",
                        action="store_false",
                        default=True,
                        help="disable cleanup of working space")
    parser.add_argument(
        "-j", "--jobs",
        dest="jobs",
        type=int,
        metavar="N",
        default=Env().build.cpu.cores,
        help="Specify the number of jobs to run simultaneously")
    parser.add_argument(
        "--show-error-output",
        action="store_true",
        help="When testcases fail, display their output. This is for"
        " convenience for interactive use.")
    parser.add_argument(
        "--dump-environ",
        dest="dump_environ",
        action="store_true",
        default=False,
        help="Dump all environment variables in a file named environ.sh,"
        " located in the output directory (see --output-dir). This"
        " file can then be sourced from a Bourne shell to recreate"
        " the environment that existed when this testsuite was run"
        " to produce a given testsuite report.")
    parser.add_argument('sublist', metavar='tests', nargs='*',
                        default=[], help='test')

    # Add user defined options
    self.add_options()

    # parse options
    self.main.parse_args(args)

    self.env = BaseEnv.from_env()
    self.env.root_dir = self.root_dir
    self.env.test_dir = self.test_dir

    # At this stage compute commonly used paths.
    # Keep the working dir as short as possible, to avoid the risk
    # of having a path that's too long (a problem often seen on
    # Windows, or when using WRS tools that have their own max path
    # limitations).
    # Note that we do make sure that working_dir is an absolute
    # path, as we are likely to be changing directories when
    # running each test. A relative path would no longer work
    # under those circumstances.
    d = os.path.abspath(self.main.args.output_dir)
    self.output_dir = os.path.join(d, 'new')
    self.old_output_dir = os.path.join(d, 'old')

    if not os.path.isdir(self.main.args.temp_dir):
        logging.critical("temp dir '%s' does not exist",
                         self.main.args.temp_dir)
        return 1

    self.working_dir = tempfile.mkdtemp(
        '', 'tmp', os.path.abspath(self.main.args.temp_dir))

    # Create the new output directory that will hold the results
    self.setup_result_dir()

    # Store in global env: target information and common paths
    self.env.output_dir = self.output_dir
    self.env.working_dir = self.working_dir
    self.env.options = self.main.args

    # User specific startup
    self.tear_up()

    # Retrieve the list of tests
    self.test_list = self.get_test_list(self.main.args.sublist)

    # Launch the mainloop
    self.total_test = len(self.test_list)
    self.run_test = 0

    self.scheduler = Scheduler(job_provider=self.job_factory,
                               collect=self.collect_result,
                               tokens=self.main.args.jobs)
    actions = DAG()
    for test in self.test_list:
        self.parse_test(actions, test)

    with open(os.path.join(self.output_dir, 'tests.dot'), 'wb') as fd:
        fd.write(actions.as_dot())
    self.scheduler.run(actions)

    self.dump_testsuite_result()

    # Clean everything
    self.tear_down()
    return 0
def test_configure():
    class AnodConf(Anod):
        @Anod.primitive()
        def build(self):
            c = Configure(self)
            return c.cmdline()

    Anod.sandbox = SandBox(root_dir=os.getcwd())
    Anod.sandbox.create_dirs()

    ac = AnodConf(qualifier="", kind="build", jobs=10)
    AnodDriver(anod_instance=ac, store=None).activate(Anod.sandbox, None)
    ac.build_space.create()

    # Configure() can add $CONFIG_SHELL in the command line
    # Check that the two other arguments are as expected
    assert ac.build()["cmd"][-2:] == [
        "../src/configure",
        "--build=%s" % ac.env.build.triplet,
    ]

    # Check with canadian env
    canadian_env = BaseEnv()
    canadian_env.set_build("x86-windows")
    canadian_env.set_host("x86-linux")
    canadian_env.set_target("arm-elf")
    assert canadian_env.is_canadian
    ac2 = AnodConf(qualifier="", kind="build", jobs=10, env=canadian_env)
    AnodDriver(anod_instance=ac2, store=None).activate(Anod.sandbox, None)
    ac2.build_space.create()
    ac2_cmd = ac2.build()["cmd"]
    assert "--build=i686-pc-mingw32" in ac2_cmd
    assert "--host=i686-pc-linux-gnu" in ac2_cmd
    assert "--target=arm-eabi" in ac2_cmd

    # Check with cross env
    cross_env = BaseEnv()
    cross_env.set_target("arm-elf")
    ac3 = AnodConf(qualifier="", kind="build", jobs=10, env=cross_env)
    AnodDriver(anod_instance=ac3, store=None).activate(Anod.sandbox, None)
    ac3.build_space.create()
    assert "--target=arm-eabi" in ac3.build()["cmd"]
def test_dag_2_plan(self):
    """Check that we can extract values from plan in final dag.

    Some parameters passed in the plan are lost in the final scheduled
    dag, when plan lines are transformed into anod actions. It is
    possible to retrieve them by looking at the tags.
    """
    # Create a new plan context
    ac = self.create_context()
    current_env = BaseEnv()
    cm = plan.PlanContext(server=current_env)

    # Declare available actions and their signature
    def anod_action(
        module,
        build=None,
        default_build=False,
        host=None,
        target=None,
        board=None,
        weathers=None,
        product_version=None,
        when_missing=None,
        manual_action=False,
        qualifier=None,
        jobs=None,
        releases=None,
        process_suffix=None,
        update_vcs=False,
        recursive=None,
        query_range=None,
        force_repackage=False,
    ):
        pass

    for a in ("anod_build", "anod_install", "anod_source", "anod_test"):
        cm.register_action(a, anod_action)

    # Create a simple plan
    content = [
        "def myserver():",
        ' anod_build("spec12", weathers="foo")',
        ' anod_build("spec10", weathers="foo")',
        ' anod_build("spec11", weathers="bar")',
    ]
    with open("plan.txt", "w") as f:
        f.write("\n".join(content))
    myplan = plan.Plan({}, plan_ext=".txt")
    myplan.load("plan.txt")

    # Execute the plan and create anod actions
    for action in cm.execute(myplan, "myserver"):
        primitive = action.action.replace("anod_", "", 1)
        ac.add_anod_action(
            name=action.module,
            env=current_env if action.default_build else action,
            primitive=primitive,
            qualifier=action.qualifier,
            plan_line=action.plan_line,
            plan_args=action.plan_args,
        )

    # Create a reverse dag to have a working get_context. When looking for
    # parameters such as weathers we want to get the plan line that has
    # triggered the action: e.g. for spec3.build, which has been triggered
    # by spec10.build, we want to propagate the weathers set in the line
    # anod_build("spec10", weathers="foo") to the Build action for spec3.
    reverse_dag = ac.tree.reverse_graph()

    for uid, action in ac.tree:
        if uid.endswith("spec12.build"):
            assert ac.tree.get_tag(uid)
            cdist, cuid, ctag = reverse_dag.get_context(uid)[0]
            assert cuid == uid
            assert ctag["plan_args"]["weathers"] == "foo"
            assert ctag["plan_line"] == "plan.txt:2"
        elif uid.endswith("spec3.build"):
            assert not ac.tree.get_tag(uid)
            cdist, cuid, ctag = reverse_dag.get_context(uid)[0]
            assert cuid != uid
            assert cuid.endswith("spec10.build")
            assert ctag["plan_args"]["weathers"] == "foo"
            assert ctag["plan_line"] == "plan.txt:3"
        elif uid.endswith("spec11.build"):
            assert ac.tree.get_tag(uid), ac.tree.tags
            cdist, cuid, ctag = reverse_dag.get_context(uid)[0]
            assert cuid == uid
            assert ctag["plan_args"]["weathers"] == "bar"
            assert ctag["plan_line"] == "plan.txt:4"

            # Also verify that the instance deps are properly loaded
            assert set(action.anod_instance.deps.keys()) == {"spec1"}
            assert action.anod_instance.deps["spec1"].__class__.__name__ == "Spec1"

    # Also test that we are still able to extract the values
    # after having scheduled the action graph.
    # Create an explicit build action to make sure that the plan can be
    # scheduled
    ac.add_anod_action(
        name="spec3",
        env=current_env,
        primitive="build",
        plan_line="plan.txt:5",
        plan_args={"weathers": "my_spec3_weather"},
    )
    sched_dag = ac.schedule(ac.always_download_source_resolver)
    sched_rev = sched_dag.reverse_graph()

    for uid, action in sched_dag:
        if uid.endswith("spec12.build"):
            assert sched_dag.get_tag(uid)

            # Also verify that the instance deps are properly loaded
            assert set(action.anod_instance.deps.keys()) == {"spec1", "spec11"}
            assert action.anod_instance.deps["spec11"].__class__.__name__ == "Spec11"
            assert action.anod_instance.deps["spec1"].__class__.__name__ == "Spec1"
        elif uid.endswith("spec3.build"):
            assert sched_dag.get_tag(uid)
            assert (
                sched_rev.get_context(uid)[0][2]["plan_args"]["weathers"]
                == "my_spec3_weather"
            )