def __init__(self, ami_id, region=None, data=None):
    """Initialize an AMI description object.

    :param ami_id: the id of the AMI
    :type ami_id: str
    :param region: region in which the AMI is present. If None then use
        default region
    :type region: None | str
    :param data: a dict representing the metadata of the AMI. If None
        then download AMI description using EC2 api
    :type data: dict | None
    """
    self.ami_id = ami_id
    self.region = region
    if self.region is None:
        self.region = Env().aws_env.default_region

    if data is None:
        aws_env = Env().aws_env
        self.data = aws_env.client('ec2', self.region).describe_images(
            ImageIds=[self.ami_id])['Images'][0]
    else:
        self.data = data

    # compute tags. describe_images omits the 'Tags' key entirely for
    # images carrying no tags, so default to an empty list to avoid a
    # KeyError in that case.
    self.tags = {el['Key']: el['Value']
                 for el in self.data.get('Tags', [])}
def parse_args(self, args=None): """Parse options and set console logger. :param args: the list of positional parameters. If None then ``sys.argv[1:]`` is used :type: list[str] | None """ # log_stream_format=e3.log.DEFAULT_STREAM_FMT, # log_file_format=e3.log.DEFAULT_FILE_FMT, # log_datefmt=None, # log_filename=None, self.args = self.argument_parser.parse_args(args) if not self.__log_handlers_set: # First set level of verbosity if self.args.verbose: level = e3.log.RAW else: level = self.args.loglevel e3.log.activate(level=level, filename=self.args.log_file, e3_debug=self.args.verbose > 1) self.__log_handlers_set = True # Export options to env e = Env() e.main_options = self.args if hasattr(self.args, 'e3_main_platform_args_supported'): e3.log.debug('parsing --build/--host/--target') # Handle --build, --host, and --target arguments e.set_env(self.args.build, self.args.host, self.args.target)
def parse_args(self, args: Optional[List[str]] = None,
               known_args_only: bool = False) -> None:
    """Parse command-line options and configure console logging.

    :param args: positional parameters to parse; when None,
        ``sys.argv[1:]`` is used
    :param known_args_only: when True, silently ignore extra arguments
        instead of producing an error
    """
    parser = self.argument_parser
    if known_args_only:
        self.args, _ = parser.parse_known_args(args)
    else:
        self.args = parser.parse_args(args)

    if not self.__log_handlers_set:
        e3.log.activate_with_args(self.args, logging.INFO)
        self.__log_handlers_set = True

    # Make the parsed options globally available through Env
    env = Env()
    env.main_options = self.args

    # When platform arguments are supported, honor --build/--host/--target
    if hasattr(self.args, "e3_main_platform_args_supported"):
        e3.log.debug("parsing --build/--host/--target")
        env.set_env(self.args.build, self.args.host, self.args.target)
def set_up(self):
    """Prepare the testsuite environment (env vars, Xvfb, Windows desktop)."""
    # Hoist the repeated dirname computation
    base = os.path.dirname(__file__)

    # Set a gnatdebug common to all tests
    os.environ['ADA_DEBUG_FILE'] = os.path.join(
        self.test_dir, 'tests', 'gnatdebug')

    # The following are used by the internal testsuite
    os.environ['GNATSTUDIO_TESTSUITE_SCRIPTS'] = os.path.join(
        base, 'internal', 'scripts')
    os.environ['GNATSTUDIO_GVD_TESTSUITE'] = os.path.join(
        base, 'internal', 'gvd_testsuite')
    os.environ['GPS_SRC_DIR'] = os.path.join(base, '..')
    os.environ['PYTHONPATH'] = "{}{}{}".format(
        os.path.join(base, 'internal', 'tests'),
        os.path.pathsep,
        os.environ.get('PYTHONPATH', ''))
    os.environ['GPS_TEST_CONTEXT'] = 'nightly'
    os.environ['CODEPEER_DEFAULT_LEVEL'] = '3'

    # Launch Xvfb if needs be
    self.xvfb = None
    if (not self.main.args.noxvfb) and Env().platform.endswith('linux'):
        Xvfbs.start_displays(DEFAULT_XVFB_DISPLAY, self.main.args.jobs)

    # Export the WINDOWS_DESKTOP environment variable to
    # test GPS in a separate virtual desktop on Windows
    if Env().build.os.name == 'windows':
        os.environ['WINDOWS_DESKTOP'] = "gps_desktop"
def __init__(self, regions=None, stub=False, profile=None):
    """Initialize an AWS session.

    Once initialized AWS environment can be accessed from Env().aws_env

    :param regions: list of regions to work on. The first region is
        considered as the default region.
    :type regions: list[str]
    :param stub: if True clients are necessarily stubbed
    :type stub: bool
    :param profile: profile name
    :type profile: str | None
    """
    self.session = botocore.session.Session(profile=profile)
    self.profile = profile
    if regions is None:
        # Fall back to the region configured in the botocore profile
        self.regions = [self.session.region_name]
    else:
        self.regions = regions
    # NOTE(review): the docstring says the first region is the default,
    # yet default_region starts as None here -- presumably set later by
    # the caller; confirm.
    self.default_region = None
    self.force_stub = stub
    # Per-region caches of botocore clients and their stubbers
    self.clients = {}
    self.stubbers = {}
    # Register this session as the process-wide AWS environment
    env = Env()
    env.aws_env = self
def args_to_env(args):
    """Build an ``e3.env.Env`` from the platform options in ``args``.

    :param args: parsed arguments carrying ``build``, ``host`` and
        ``target`` attributes
    :return: the configured Env instance
    """
    env = Env()
    env.set_env(args.build, args.host, args.target)
    return env
def wrapper(*args, **kwargs):
    """Inject an AWS client into the wrapped function call.

    A ``region`` keyword passed by the caller is consumed here and used
    to select the client; otherwise the AWS environment's default
    region is used.
    """
    aws_env = Env().aws_env
    # pop() with a default replaces the check/del pair and is exactly
    # equivalent (the caller's value wins even when it is None)
    region = kwargs.pop("region", aws_env.default_region)
    client = aws_env.client(name, region=region)
    return func(*args, client=client, **kwargs)
def ls(cls, filters=None):
    """List user AMIs across all configured regions.

    :param filters: same as Filters parameter of describe_images
        (see botocore). Defaults to no filtering, preserving the
        previous behavior.
    :type filters: list | None
    :return: a list of images
    :rtype: list[AMI]
    """
    aws_env = Env().aws_env
    if filters is None:
        filters = []
    result = []
    for r in aws_env.regions:
        c = aws_env.client('ec2', r)
        # Owners=['self'] restricts the listing to AMIs of this account
        region_result = c.describe_images(Owners=['self'], Filters=filters)
        for ami in region_result['Images']:
            result.append(AMI(ami['ImageId'], r, data=ami))
    return result
def run(self):
    """Run the build job: recreate build/install dirs and build.

    Sets ``self.run_status`` to success or failure; never raises.
    """
    try:
        build_space = self.data.anod_instance.build_space
        rm(build_space.build_dir, recursive=True)
        mkdir(build_space.build_dir)
        rm(build_space.install_dir, recursive=True)
        mkdir(build_space.install_dir)
        Env().store()
        try:
            cd(build_space.build_dir)
            self.data.anod_instance.jobs = Env().build.cpu.cores
            self.data.anod_instance.build()
        finally:
            # Restore the environment even when the build raises, so a
            # failing build does not leak the modified env to later jobs
            Env().restore()
        self.run_status = ReturnValue.success
    except Exception:
        logging.exception("got exception while building")
        self.run_status = ReturnValue.failure
def tear_up(self):
    """Per-test setup: gnatdebug file, Xvfb display, Windows desktop."""
    # Common gnatdebug shared by every test
    os.environ['ADA_DEBUG_FILE'] = os.path.join(self.test_dir, 'gnatdebug')

    # Start Xvfb on display :1 when on Linux, unless explicitly disabled
    self.xvfb = None
    if not self.main.args.noxvfb and Env().platform.endswith('linux'):
        self.xvfb = Xvfb(1)
        os.environ['DISPLAY'] = ':1'

    # On Windows, run GPS in a separate virtual desktop
    if Env().build.os.name == 'windows':
        os.environ['WINDOWS_DESKTOP'] = "gps_desktop"
def make_gnatcoll_for_gcov(work_dir, components):
    """Build gnatcoll core with gcov instrumentation.

    :param work_dir: working directory. gnatcoll is built in `build` subdir
        and installed in `install` subdir
    :type work_dir: str
    :param components: gnatcoll components to build; each entry must be a
        key of COMPONENT_PROPERTIES
    :type components: list[str]
    :return: a triplet (project path, source path, object path)
    :rtype: (str, str, str)
    :raise AssertError: in case compilation or installation fails
    """
    logging.info('Compiling gnatcoll with gcov instrumentation')
    build_dir = os.path.join(work_dir, 'build')
    install_dir = os.path.join(work_dir, 'install')
    mkdir(build_dir)
    mkdir(install_dir)

    # Add the resulting library into the GPR path
    Env().add_search_path('GPR_PROJECT_PATH',
                          os.path.join(install_dir, 'share', 'gpr'))
    Env().add_path(os.path.join(install_dir, 'bin'))

    for component in components:
        logging.info('Compiling: %s', component)
        # Instrument for gcov: arc profiling + coverage notes
        gcov_options = '-cargs -fprofile-arcs -ftest-coverage -gargs'
        component_dir = COMPONENT_PROPERTIES[component].get(
            'component', component)

        # Executables additionally need to link against libgcov
        if COMPONENT_PROPERTIES[component].get('is_bin'):
            gcov_options += ' -largs -lgcov -gargs'

        make_gnatcoll_cmd = [
            'make', '-f',
            os.path.join(GNATCOLL_ROOT_DIR, component_dir, 'Makefile'),
            'BUILD=DEBUG',
            'GPRBUILD_OPTIONS=%s' % gcov_options,
            'ENABLE_SHARED=no'] + \
            COMPONENT_PROPERTIES[component].get('make_args', [])

        p = Run(make_gnatcoll_cmd, cwd=build_dir)
        assert p.status == 0, "gnatcoll build failed:\n%s" % p.out

        p = Run(make_gnatcoll_cmd + ['prefix=%s' % install_dir, 'install'],
                cwd=build_dir)
        assert p.status == 0, "gnatcoll installation failed:\n%s" % p.out

    return (os.path.join(install_dir, 'share', 'gpr'),
            os.path.join(install_dir, 'include'),
            os.path.join(build_dir, 'obj', 'static'))
def execute(self, args=None, known_args_only=False, aws_env=None):
    """Execute application and return exit status.

    See parse_args arguments.
    """
    super(CFNMain, self).parse_args(args, known_args_only)

    if aws_env is not None:
        self.aws_env = aws_env
    elif self.assume_role:
        main_session = Session(regions=self.regions,
                               profile=self.args.profile)
        self.aws_env = main_session.assume_role(
            self.assume_role[0], self.assume_role[1])
        # ??? needed since we still use a global variable for AWSEnv
        Env().aws_env = self.aws_env
    else:
        self.aws_env = AWSEnv(regions=self.regions,
                              profile=self.args.profile)
    self.aws_env.default_region = self.args.region

    stacks = self.create_stack()
    if not isinstance(stacks, list):
        return self.execute_for_stack(stacks)

    status = 0
    for stack in stacks:
        status = self.execute_for_stack(stack)
        # Stop at first failure
        if status:
            return status
    return status
def __init__(self, prefix, publish=False, nsis=None, version=None,
             pkg_ext='.tar.gz', no_root_dir=False, **kwargs):
    """Create a binary package description.

    :param prefix: prefix of the package to create; the final name is
        {prefix}-{version}-{platform}-bin.{exe,tar.gz}. The version
        defaults to Anod.sandbox.build_version and can be overridden by
        the version callback.
    :type prefix: str
    :param publish: if True, publish the package
    :type publish: bool
    :param nsis: a callback returning a dictionary containing needed
        data to build an NSIS package.
    :type nsis: () -> dict[str][str] | None
    :param version: a callback returning the package version; if None
        the version is set to Anod.sandbox.build_version
    :type version: () -> str | None
    :param pkg_ext: extension of the binary package (ignored when nsis
        is used). Defaults to .tar.gz.
    :type pkg_ext: str
    :param no_root_dir: create package without the root directory
        (zip only)
    :type no_root_dir: bool
    """
    # Silently discard unsupported keyword parameters
    del kwargs

    self.prefix = prefix
    self.name = prefix + '-{version}-{platform}-bin'
    self.platform = Env().platform
    self.publish = publish
    self.nsis = nsis
    self.version = version
    self.pkg_ext = pkg_ext
    self.no_root_dir = no_root_dir
def main():
    from e3.env import Env
    import e3.main

    m = e3.main.Main(platform_args=True)
    parser = m.argument_parser
    parser.add_argument(
        "--platform-info",
        choices={"build", "host", "target"},
        help="Show build/host/target platform info",
    )
    parser.add_argument(
        "--version", help="Show E3 version", action="store_true"
    )
    parser.add_argument(
        "--check", help="Run e3 sanity checking", action="store_true"
    )
    m.parse_args()

    # Handle each mode with an early return instead of an elif chain
    if m.args.version:
        print(version())
        return
    if m.args.check:
        errors = sanity_check()
        if errors:  # defensive code
            logger.error("sanity checking failed!")
            sys.exit(1)
        print("Everything OK!")
        return
    if m.args.platform_info:
        print(getattr(Env(), m.args.platform_info))
def env_protect(request):
    """Protection against environment change.

    The fixture is enabled for all tests and does the following:

    * store/restore env between each tests
    * create a temporary directory and do a cd to it before each
      test. The directory is automatically removed when test ends
    """
    Env().store()
    tempd = tempfile.mkdtemp()
    cd(tempd)

    # When a test carries a @pytest.mark.data_dir("<name>") marker, copy
    # the matching project tree into the temporary working directory
    project_marker = request.node.get_closest_marker("data_dir")
    if project_marker is not None:
        project_dir = os.path.join(ROOT_DIR, "projects", project_marker.args[0])
        logging.debug(f"use project dir {project_dir}")
        sync_tree(project_dir, tempd)

    def restore_env():
        # NOTE(review): assumes Env().restore() also restores the prior
        # working directory, otherwise rm() removes the current cwd --
        # confirm against e3.env.Env semantics
        Env().restore()
        rm(tempd, True)

    request.addfinalizer(restore_env)
def main():
    from e3.env import Env
    import e3.main

    m = e3.main.Main(platform_args=True)
    m.argument_parser.add_argument(
        '--platform-info',
        choices={'build', 'host', 'target'},
        help='Show build/host/target platform info')
    m.argument_parser.add_argument(
        '--version', help='Show E3 version', action='store_true')
    m.argument_parser.add_argument(
        '--check', help='Run e3 sanity checking', action='store_true')
    m.parse_args()

    # Dispatch on the selected mode
    if m.args.version:
        print(version())
    elif m.args.check:
        if sanity_check():  # defensive code
            logger.error('sanity checking failed!')
            sys.exit(1)
        print('Everything OK!')
    elif m.args.platform_info:
        print(getattr(Env(), m.args.platform_info))
def test_fingerprint():
    """Check Fingerprint comparison, sha1 distinctness and validation."""
    f1 = Fingerprint()
    f1.add('foo', '2')

    f2 = Fingerprint()
    f2.add('foo', '4')

    f12_diff = f2.compare_to(f1)
    assert f12_diff['new'] == set()
    assert f12_diff['updated'] == {'foo'}
    assert f12_diff['obsolete'] == set()

    f3 = Fingerprint()
    f3.add_file(__file__)

    f23_diff = f3.compare_to(f2)
    assert f23_diff['new'] == {'foo'}
    assert f23_diff['updated'] == set()
    assert f23_diff['obsolete'] == {os.path.basename(__file__)}

    # A chained a != b != c never compares a with c, so collect the
    # three sha1 values in a set and require them all distinct.
    assert len({f1.sha1(), f2.sha1(), f3.sha1()}) == 3

    assert Env().build.os.version in str(f3)

    f4 = Fingerprint()
    f4.add_file(__file__)
    assert f4 == f3

    f5 = Fingerprint()
    with pytest.raises(AnodError) as err:
        f5.add('f4', f4)
    assert 'f4 should be a string' in str(err.value)
def wrapper(*args, **kwargs):
    """Inject the AWS session into the wrapped function call.

    An explicit ``session`` keyword passed by the caller takes
    precedence; otherwise the global Env().aws_env is used.
    """
    # EAFP pop(): one dict lookup instead of the check/get/del triple,
    # and Env().aws_env is only touched when no session was provided
    try:
        session = kwargs.pop('session')
    except KeyError:
        session = Env().aws_env
    return func(*args, session=session, **kwargs)
def wrapper(*args, **kwargs):
    """Inject the AWS session into the wrapped function call.

    An explicit ``session`` keyword passed by the caller takes
    precedence; otherwise the global Env().aws_env is used.
    """
    # EAFP pop(): one dict lookup instead of the check/get/del triple,
    # and Env().aws_env is only touched when no session was provided
    try:
        session = kwargs.pop("session")
    except KeyError:
        session = Env().aws_env
    return func(*args, session=session, **kwargs)
def probe(self, testsuite: TestsuiteCore, dirpath: str,
          dirnames: List[str],
          filenames: List[str]) -> TestFinderResult:
    """Probe a directory for a testcase.

    A directory contains a testcase iff it contains a "test.yaml" file.

    :return: a ParsedTest when a testcase is found, None otherwise
    :raise ProbingError: when test.yaml is invalid or names an unknown
        driver
    """
    # There is a testcase iff there is a "test.yaml" file
    if "test.yaml" not in filenames:
        return None
    test_name = testsuite.test_name(dirpath)
    yaml_file = os.path.join(dirpath, "test.yaml")

    # Load the YAML file to build the test environment
    try:
        test_env = e3.yaml.load_with_config(yaml_file, Env().to_dict())
    except e3.yaml.YamlError as exc:
        # Chain the original YAML error for easier debugging
        raise ProbingError(
            "invalid syntax for test.yaml in '{}'".format(test_name)
        ) from exc

    # Ensure that the test_env act like a dictionary. We still accept None
    # as it's a shortcut for "just use default driver" configuration files.
    if test_env is None:
        test_env = {}
    elif not isinstance(test_env, collections.abc.Mapping):
        raise ProbingError(
            "invalid format for test.yaml in '{}'".format(test_name))

    driver_name = test_env.get("driver")
    if driver_name is None:
        driver_cls = None
    else:
        try:
            driver_cls = testsuite.test_driver_map[driver_name]
        except KeyError as exc:
            # Chain the lookup failure for easier debugging
            raise ProbingError(
                "cannot find driver for test '{}'".format(test_name)
            ) from exc

    return ParsedTest(test_name, driver_cls, test_env, dirpath)
def __init__(self, regions=None, stub=False, profile=None):
    """Initialize an AWS session.

    Once initialized AWS environment can be accessed from Env().aws_env

    :param regions: list of regions to work on. The first region is
        considered as the default region.
    :type regions: list[str]
    :param stub: if True clients are necessarily stubbed
    :type stub: bool
    :param profile: profile name
    :type profile: str | None
    """
    super().__init__(regions=regions, stub=stub, profile=profile)
    # Register this session as the process-wide AWS environment.
    # NOTE(review): if the parent __init__ already sets Env().aws_env,
    # this assignment is redundant -- confirm against the base class.
    env = Env()
    env.aws_env = self
def __init__(self):
    """Initialise a new fingerprint instance."""
    # Mapping of element name -> value composing the fingerprint
    self.elements = {}
    # Seed with the OS version so a platform change invalidates it
    self.add("os_version", Env().build.os.version)
    # ??? add more detailed information about the build machine so that
    # even a minor OS upgrade trigger a rebuild
    # Bump FINGERPRINT_VERSION to invalidate all existing fingerprints
    self.add("fingerprint_version", FINGERPRINT_VERSION)
def runcross(target, board, bin, output='runcross.out'):
    """Run a cross binary on the given target/board and report failures.

    :param target: target name passed to Env().set_target
    :param board: board name passed to Env().set_target
    :param bin: path of the binary to run (NOTE: shadows the ``bin``
        builtin, but renaming would break keyword callers)
    :param output: file receiving the captured run output
    """
    Env().set_target(target, '', board)
    # 5-second timeout; on failure dump the captured output to help
    # diagnose the run
    p = run_cross([bin], output=output, timeout=5)
    if p.status != 0:
        print("runcross failed:")
        print(contents_of(output))
def __init__(self, region: str):
    """Initialize context manager.

    :param region: default region
    """
    aws_env = Env().aws_env
    # Remember the currently selected region so it can be restored when
    # the context exits
    self.previous_region = aws_env.default_region
    self.default_region = region
def parse_args(self, args: Optional[List[str]] = None,
               known_args_only: bool = False) -> None:
    """Parse command-line options and configure console logging.

    :param args: positional parameters to parse; when None,
        ``sys.argv[1:]`` is used
    :param known_args_only: when True, silently ignore extra arguments
        instead of producing an error
    """
    if known_args_only:
        self.args, _ = self.argument_parser.parse_known_args(args)
    else:
        self.args = self.argument_parser.parse_args(args)

    if self.args.nocolor:
        e3.log.pretty_cli = False

    if not self.__log_handlers_set:
        # -v/--verbose wins over the explicit log level option
        level = logging.DEBUG if self.args.verbose else self.args.loglevel
        if self.args.console_logs:
            e3.log.console_logs = self.args.console_logs
        e3.log.activate(
            level=level,
            filename=self.args.log_file,
            json_format=self.args.json_logs,
            e3_debug=self.args.verbose > 1,
        )
        self.__log_handlers_set = True

    # Make the parsed options globally available through Env
    env = Env()
    env.main_options = self.args

    # When platform arguments are supported, honor --build/--host/--target
    if hasattr(self.args, "e3_main_platform_args_supported"):
        e3.log.debug("parsing --build/--host/--target")
        env.set_env(self.args.build, self.args.host, self.args.target)
def ls(cls, filters=None):
    """List user AMIs.

    :param filters: same as Filters parameters of describe_images
        (see botocore)
    :type filters: dict
    :return: a list of images
    :rtype: list[AMI]
    """
    aws_env = Env().aws_env
    if filters is None:
        filters = []
    result = []
    for r in aws_env.regions:
        c = aws_env.client('ec2', r)
        # Owners=['self'] restricts the listing to AMIs of this account
        region_result = c.describe_images(Owners=['self'], Filters=filters)
        for ami in region_result['Images']:
            result.append(AMI(ami['ImageId'], r, data=ami))
    return result
def tear_up(self):
    """Per-test setup: common gnatdebug file and optional Xvfb display."""
    # Common gnatdebug shared by every test
    os.environ['ADA_DEBUG_FILE'] = os.path.join(self.test_dir, 'gnatdebug')

    # Start Xvfb on display :1 when on Linux, unless explicitly disabled
    self.xvfb = None
    if not self.main.args.noxvfb and Env().platform.endswith('linux'):
        self.xvfb = Xvfb(1)
        os.environ['DISPLAY'] = ':1'
def __init__(
    self,
    dag: DAG,
    job_factory: JobFactoryCallback,
    collect_result: CollectResultCallback,
    jobs: int = 0,
    dyn_poll_interval: bool = True,
):
    """Initialize a MultiprocessScheduler instance.

    :param dag: DAG in which nodes represent units of work to do and
        edges represent dependencies between them.
    :param job_factory: Callback to turn DAG nodes into corresponding
        Worker instances.
    :param collect_result: Callback to extract work result from a worker.
    :param jobs: Maximum number of workers allowed to run in parallel.
        If left to 0, use the number of available cores on the current
        machine.
    :param dyn_poll_interval: If True the interval between each polling
        iteration is automatically updated. Otherwise it's set to 0.1
        seconds.
    """
    e = Env()
    self.parallelism = jobs or e.build.cpu.cores
    self.dag = dag
    self.workers: List[Optional[SomeWorker]] = [None] * self.parallelism
    """
    List of active workers. Indexes in this list correspond to slot IDs
    passed to workers: `self.workers[N].slot == N` for all present
    workers. When the worker is done, we just replace it with None, and
    when a slot is None we can create a new worker for it.
    """

    self.iterator = DAGIterator(self.dag, enable_busy_state=True)
    """Iterator to get ready-to-run units of work."""

    self.job_factory = job_factory
    self.collect_result = collect_result

    self.active_workers = 0
    """Equivalent to the number of non-None slots in ``self.workers``."""

    self.poll_interval = 0.1
    """Time (in seconds) to wait between each round of worker polling."""

    self.dyn_poll_interval = dyn_poll_interval

    self.no_free_item = False
    """
    True if there is work waiting to be executed, False if all work to
    be scheduled depends on work that hasn't completed.
    """

    self.no_work_left = False
    """
    True once no work remains to be scheduled. (NOTE(review): the
    original docstring is truncated at this point in the reviewed
    chunk; confirm the intended wording.)
    """
def set_cfn_init(self, stack,
                 config='init',
                 cfn_init='/usr/local/bin/cfn-init',
                 region=None,
                 resource=None,
                 metadata=None,
                 init_script=''):
    """Add CFN init call on first boot of the instance.

    :param stack: name of the stack containing the cfn metadata
    :type stack: str
    :param config: name of the configset to be launch (default: init)
    :type config: str
    :param cfn_init: location of cfn-init on the instance
        (default: /usr/local/bin/cfn-init)
    :type cfn_init: str
    :param region: AWS region. if not specified use current default region
    :type region: name | None
    :param resource: resource in which the metadata will be added. Default
        is to use current resource
    :type resource: str | None
    :param metadata: dict conforming to AWS::CloudFormation::Init
        specifications
    :type metadata: dict | None
    :param init_script: command to launch after cfn-init
    :type init_script: PowerShell command on Windows, bash command on
        Linux
    """
    # Resolve defaults: current default region and current resource name
    if region is None:
        region = Env().aws_env.default_region
    if resource is None:
        resource = self.name

    if self.image.is_windows:
        self.add_user_data(
            'powershell',
            CFN_INIT_STARTUP_SCRIPT_WIN % {'region': region,
                                           'stack': stack,
                                           'resource': resource,
                                           'cfn_init': cfn_init,
                                           'config': config} + init_script)
        # presumably 'persist' asks Windows to re-run user data on later
        # boots -- confirm against the AWS user-data documentation
        self.add_user_data('persist', 'true')
    else:
        self.add_user_data('x-shellscript',
                           CFN_INIT_STARTUP_SCRIPT % {'region': region,
                                                      'stack': stack,
                                                      'resource': resource,
                                                      'cfn_init': cfn_init,
                                                      'config': config} +
                           init_script,
                           'init.sh')

    if metadata is not None:
        self.metadata['AWS::CloudFormation::Init'] = metadata
def SCOV_CARGS(options):
    """Return compilation args needed by source-coverage tests.

    These flags are passed by default to build invocations issued
    through the gprbuild() API. They depend on the (provided) toplevel
    testsuite command line options, indicating for example whether we
    do a run based on binary traces or on source instrumentation.

    :param options: toplevel testsuite options; only ``trace_mode`` is
        consulted here
    :return: a list of compiler arguments (empty except in binary
        trace mode)
    """
    # Only working from binary traces relies on specific properties:
    if options.trace_mode != 'bin':
        return []

    # Critical conditional branches must be preserved, source coverage
    # obligations need to be generated by the compiler, and debug info
    # is needed.
    cargs = ["-fpreserve-control-flow", "-fdump-scos", "-g"]

    # Hoist the target triplet, consulted twice below
    triplet = Env().target.triplet

    # Proper support of inlining or generics requires advanced debug
    # info features possibly disabled by default on some targets. We
    # could enforce this with an explicit option unconditionally, but
    # it is cleaner and simpler to have this exposed only when needed
    # in qualification material.
    if 'vxworks' in triplet:
        cargs.append("-gno-strict-dwarf")

    # In addition, identification of call targets, necessary to
    # recognize exception edges, currently requires call-site debug
    # info on partially linked objects, which we'll only get for long
    # calls on powerpc.
    if 'powerpc' in triplet:
        cargs.append("-mlongcall")

    return cargs
def set_up(self): base = os.path.dirname(__file__) # Set a gnatdebug common to all tests os.environ['ADA_DEBUG_FILE'] = os.path.join( self.test_dir, 'tests', 'gnatdebug') # The following are used by the internal testsuite os.environ['GNATSTUDIO_TESTSUITE_SCRIPTS'] = os.path.join( base, 'internal', 'scripts') os.environ['GNATSTUDIO_GVD_TESTSUITE'] = os.path.join( base, 'internal', 'gvd_testsuite') os.environ['GPS_SRC_DIR'] = os.path.join(base, '..') os.environ['PYTHONPATH'] = "{}{}{}".format( os.path.join(base, 'internal', 'tests'), os.path.pathsep, os.environ.get('PYTHONPATH', '')) os.environ['GPS_TEST_CONTEXT'] = 'nightly' os.environ['CODEPEER_DEFAULT_LEVEL'] = '3' # Prepare valgrind command line self.env.wait_factor = 1 self.env.valgrind_cmd = [] if self.env.options.valgrind_memcheck: self.env.valgrind_cmd = [find_executable("valgrind") ] + [opt.format(base=base) for opt in VALGRIND_OPTIONS] self.env.wait_factor = 10 # valgrind is slow # Launch Xvfb if needs be self.xvfb = None if (not self.main.args.noxvfb) and Env().platform.endswith('linux'): Xvfbs.start_displays(DEFAULT_XVFB_DISPLAY, self.main.args.jobs) # Export the WINDOWS_DESKTOP environment variable to # test GPS in a separate virtual desktop on Windows if (not self.main.args.noxvfb) and Env().build.os.name == 'windows': os.environ['WINDOWS_DESKTOP'] = "gps_desktop"