def get_path_for_testresults() -> str:
    """
        Returns the path to the testresults directory.

        :returns: The path to the testresults directory

        .. note:: This path is generally only used by the commandline because it can
                  intentionally provide a different result than get_path_for_output if
                  the current job is not a TESTRUN job.  If you're running in a TESTRUN
                  job, utilize get_path_for_output instead of this API.
    """
    global DIR_TESTRESULTS_DIRECTORY

    if DIR_TESTRESULTS_DIRECTORY is None:
        ctx = Context()
        configuration = ctx.lookup("/configuration")
        tr_dir = configuration.lookup("/paths/testresults")
        DIR_TESTRESULTS_DIRECTORY = tr_dir

    if not os.path.exists(DIR_TESTRESULTS_DIRECTORY):
        os.makedirs(DIR_TESTRESULTS_DIRECTORY)

    return DIR_TESTRESULTS_DIRECTORY
def command_akit_utilities_outputfolder(runtime_file, starttime):

    # IMPORTANT: We need to load the context first because it will trigger the loading
    # of the default user configuration
    from akit.environment.context import Context
    from akit.environment.variables import JOB_TYPES
    from akit.environment.optionoverrides import override_starttime, override_config_runtime

    ctx = Context()
    env = ctx.lookup("/environment")

    # We need to set the job type before we trigger activation.
    env["jobtype"] = JOB_TYPES.CONSOLE

    import akit.activation.console

    if runtime_file is not None:
        override_config_runtime(runtime_file)

    if starttime is not None:
        override_starttime(starttime)

    from akit.paths import get_path_for_testresults

    ts_string = get_path_for_testresults()
    print(ts_string)

    return
def get_summary_static_resource_src_dir() -> str:
    """
        Returns the path that is the source path for the test summary static resources.
    """
    ctx = Context()
    res_dir = ctx.lookup(ContextPaths.DIR_RESULTS_RESOURCE_SRC)
    return res_dir
def get_filename_for_topology() -> str:
    """
        Returns the path to the topology file.
    """
    filename = None

    ctx = Context()
    filename = get_expanded_path(ctx.lookup(ContextPaths.CONFIG_FILE_TOPOLOGY))

    return filename
def get_filename_for_credentials() -> str:
    """
        Returns the path to the credentials file.
    """
    filename = None

    ctx = Context()
    file_config = ctx.lookup(ContextPaths.CONFIG_FILE_CREDENTIALS)
    filename = get_expanded_path(file_config)

    return filename
def get_filename_for_runtime() -> str:
    """
        Returns the path to the runtime configuration file.
    """
    filename = None

    ctx = Context()
    file_config = ctx.lookup(ContextPaths.CONFIG_FILE_RUNTIME)
    filename = get_expanded_path(file_config)

    return filename
def get_summary_static_resource_dest_dir(create=True) -> str:
    """
        Returns the path where the static resources for test summaries should be published.
    """
    ctx = Context()
    res_dir = ctx.lookup(ContextPaths.DIR_RESULTS_RESOURCE_DEST)

    if create and not os.path.exists(res_dir):
        os.makedirs(res_dir)

    return res_dir
def lookup_database_connection_factory(conn_profile: str):

    global database_connection_factories

    conn_factory = None

    if conn_profile in database_connection_factories:
        conn_factory = database_connection_factories[conn_profile]
    else:
        from akit.environment.context import Context

        ctx = Context()

        conn_info = None

        rcdatabases = ctx.lookup(ContextPaths.DATABASES)
        if rcdatabases is not None and conn_profile in rcdatabases:
            conn_info = rcdatabases[conn_profile].value
        else:
            from akit.interop.landscaping.landscape import Landscape

            lscape = Landscape()
            lsdatabases = lscape.databases
            if lsdatabases is not None and conn_profile in lsdatabases:
                conn_info = lsdatabases[conn_profile]

        if conn_info is not None:
            if "conntype" in conn_info:
                conntype = conn_info["conntype"].lower()
                if conntype == "basic":
                    conn_factory = BasicDatabaseConnectionFactory(conn_profile, **conn_info)
                elif conntype == "basic-tcp":
                    conn_factory = BasicTcpDatabaseConnectionFactory(conn_profile, **conn_info)
                else:
                    errmsg = "Unknown database connection type. connection={} conntype={}".format(
                        conn_profile, conntype)
                    raise AKitConfigurationError(errmsg)

                database_connection_factories[conn_profile] = conn_factory
            else:
                errmsg = "Database connection entries must have a 'conntype' entry. connection={}".format(
                    conn_profile)
                raise AKitConfigurationError(errmsg)
        else:
            errmsg = "Database connection not found. connection={}".format(conn_profile)
            raise AKitConfigurationError(errmsg)

    return conn_factory
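# Example (hedged sketch): the shape of a database connection entry that
# lookup_database_connection_factory can resolve, whether it comes from the runtime
# configuration databases table or from the landscape 'databases' attribute.  Only the
# 'conntype' key ('basic' or 'basic-tcp') is required by the code above; the profile
# name and the remaining keys are illustrative assumptions, not a documented schema.
#
#     databases:
#         results-db:
#             conntype: basic-tcp        # selects BasicTcpDatabaseConnectionFactory
#             host: db.example.local     # assumed key
#             port: 5432                 # assumed key
#
# A caller would then obtain a cached factory by profile name:
#
#     conn_factory = lookup_database_connection_factory("results-db")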
def get_path_for_output(create=True) -> str:
    """
        Returns the timestamped path where test results and artifacts are deposited to.
    """
    global DIR_RESULTS_DIRECTORY

    if DIR_RESULTS_DIRECTORY is None:
        ctx = Context()
        DIR_RESULTS_DIRECTORY = get_expanded_path(ctx.lookup(ContextPaths.OUTPUT_DIRECTORY))

    if create and not os.path.exists(DIR_RESULTS_DIRECTORY):
        os.makedirs(DIR_RESULTS_DIRECTORY)

    return DIR_RESULTS_DIRECTORY
def logging_initialize():
    """
        Method used to configure the automation kit logging based on the environmental
        parameters specified and then reinitialize the logging.
    """
    global logging_initialized

    if not logging_initialized:
        logging_initialized = True

        ctx = Context()
        env = ctx.lookup("/environment")
        conf = ctx.lookup("/configuration")

        logging_conf = conf["logging"]
        log_levels = logging_conf["levels"]

        consolelevel = AKIT_VARIABLES.AKIT_LOG_LEVEL_CONSOLE
        logfilelevel = AKIT_VARIABLES.AKIT_LOG_LEVEL_FILE

        logname_template = logging_conf["logname"]
        logname = env.fill_template(logname_template)

        output_directory = env["output_directory"]
        if not os.path.exists(output_directory):
            os.makedirs(output_directory)

        log_branches = []
        if "branched" in logging_conf:
            log_branches = logging_conf["branched"]

        # Setup the log files
        _reinitialize_logging(consolelevel, logfilelevel, output_directory, logname, log_branches)

    return
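# Example (hedged sketch): the keys that logging_initialize reads from the 'logging'
# section of the '/configuration' context.  'levels' and 'logname' are read
# unconditionally and 'branched' is optional; the sub-keys shown under 'levels' are
# assumptions, since this excerpt does not show how they are consumed.  The 'logname'
# value is run through env.fill_template, so it may contain template placeholders.
#
#     logging:
#         levels:
#             console: INFO      # assumed sub-key
#             logfile: DEBUG     # assumed sub-key
#         logname: "testrun.log"
#         branched:
#             - name: akit.interop
#               logname: interop.log
#               loglevel: DEBUG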
def get_summary_html_template_source() -> str:
    """
        Looks up a source path for the summary html template.

        :returns: The path to the html summary template
    """
    from akit.environment.context import Context

    ctx = Context()

    template = ctx.lookup(ContextPaths.FILE_RESULTS_TEMPLATE)
    if template is None:
        errmsg = "Error attempting to lookup the summary html template source."
        raise AKitRuntimeError(errmsg)

    return template
def command_akit_workflow_run(workflow, output=None, start=None, console_level=None, logfile_level=None):
    # pylint: disable=unused-import,import-outside-toplevel

    # We do the imports of the automation framework code inside the action functions because
    # we don't want to start up logging and the processing of inputs and environment variables
    # until we have entered an action method.  That way we know how to set up the environment.

    # IMPORTANT: We need to load the context first because it will trigger the loading
    # of the default user configuration
    from akit.environment.context import Context
    from akit.environment.variables import extend_path

    ctx = Context()
    env = ctx.lookup("/environment")

    workflow_file = os.path.abspath(os.path.expanduser(os.path.expandvars(workflow)))
    if not os.path.exists(workflow_file):
        error_msg = "The specified workflow file does not exist. file=%s" % workflow_file
        raise click.BadParameter(error_msg)

    workflow_info = None
    with open(workflow_file, 'r') as wpf:
        wpfcontent = wpf.read()
        workflow_info = yaml.safe_load(wpfcontent)

    if workflow_info is not None:
        from akit.workflow.entrypoints import run_workflow_entrypoint

        # Run the work packet
        run_workflow_entrypoint(workflow_file, workflow_info)
    else:
        error_msg = "Failure loading the work packet info from file=%s" % workflow_file
        raise click.BadParameter(error_msg)

    return
def __init__(self):
    self._service_lock = threading.RLock()

    self._device_ref = None
    self._soap_processor = SoapProcessor()

    self._host = None
    self._baseURL = None

    self._controlURL = None
    self._eventSubURL = None
    self._SCPDURL = None
    self._serviceType = None
    self._serviceId = None

    self._validate_parameter_values = True

    self._subscription_id = None
    self._subscription_expiration = None

    self._default_variables = {}
    self._create_default_variables_from_list()

    # Initialize the evented variable sink
    super().__init__(self.SERVICE_EVENT_VARIABLES, state_lock=self._service_lock,
                     sink_prefix=self.SERVICE_TYPE, auto_renew_subscriptions=True)

    self._logged_events = None
    if self.SERVICE_TYPE is not None:
        ctx = Context()
        logged_events_by_service = ctx.lookup(ContextPaths.UPNP_LOGGED_EVENTS)
        if logged_events_by_service is not None and self.SERVICE_TYPE in logged_events_by_service:
            self._logged_events = logged_events_by_service[self.SERVICE_TYPE]

    return
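# Example (hedged sketch): the mapping shape the constructor above expects to find at
# ContextPaths.UPNP_LOGGED_EVENTS: a dict keyed by UPnP service type.  The code only
# stores the looked-up value in self._logged_events, so the per-service value shown
# here (a list of evented variable names) is an assumption for illustration.
#
#     logged_events_by_service = {
#         "urn:schemas-upnp-org:service:AVTransport:1": ["LastChange"]
#     }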
def load(self, landscape_file: str, log_to_directory: Optional[str]=None):
    """
        Loads and validates the landscape description file.
    """
    logger = getAutomatonKitLogger()

    landscape_info = None

    with open(landscape_file, 'r') as lf:
        lfcontent = lf.read()
        landscape_info = yaml.safe_load(lfcontent)

    if log_to_directory is not None:
        try:
            landscape_file_basename = os.path.basename(landscape_file)
            landscape_file_basename, landscape_file_ext = os.path.splitext(landscape_file_basename)

            landscape_file_copy = os.path.join(log_to_directory, "landscape-declared{}".format(landscape_file_ext))
            shutil.copy2(landscape_file, landscape_file_copy)

            # Create a json copy of the landscape file until the time when we can
            # parse yaml in the test summary javascript.
            landscape_info_copy = copy.deepcopy(landscape_info)
            landscape_file_copy = os.path.join(log_to_directory, "landscape-declared.json")
            with open(landscape_file_copy, 'w') as lsf:
                json.dump(landscape_info_copy, lsf, indent=4)
        except Exception as xcpt:
            err_msg = "Error while logging the landscape file (%s)%s%s" % (
                landscape_file, os.linesep, traceback.format_exc())
            raise AKitRuntimeError(err_msg) from xcpt

    errors, warnings = self.validate_landscape(landscape_info)

    if len(errors) > 0:
        errmsg_lines = [
            "ERROR Landscape validation failures:"
        ]
        for err in errors:
            errmsg_lines.append(" %s" % err)

        errmsg = os.linesep.join(errmsg_lines)
        raise AKitConfigurationError(errmsg) from None

    if len(warnings) > 0:
        for wrn in warnings:
            logger.warn("Landscape Configuration Warning: (%s)" % wrn)

    if "devices" in landscape_info["pod"]:
        devices = landscape_info["pod"]["devices"]

        device_lookup_table = {}
        for dev in devices:
            dev_type = dev["deviceType"]
            if dev_type == "network/upnp":
                dkey = "UPNP:{}".format(dev["upnp"]["USN"]).upper()
                device_lookup_table[dkey] = dev
            elif dev_type == "network/ssh":
                dkey = "SSH:{}".format(dev["host"]).upper()
                device_lookup_table[dkey] = dev

        ctx = Context()
        skipped_devices = ctx.lookup(ContextPaths.SKIPPED_DEVICES, default=[])
        for dev_key in skipped_devices:
            dev_key = dev_key.upper()
            if dev_key in device_lookup_table:
                device = device_lookup_table[dev_key]
                device["skip"] = True

    return landscape_info
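# Example (hedged sketch): a minimal landscape declaration containing only the fields the
# load method above actually reads ('pod', 'devices', 'deviceType', 'upnp'/'USN', 'host').
# A real landscape file will carry more than this, and the concrete values below are
# illustrative assumptions.
#
#     pod:
#         devices:
#             - deviceType: network/upnp
#               upnp:
#                   USN: "uuid:00000000-0000-0000-0000-000000000000"
#             - deviceType: network/ssh
#               host: 192.168.1.40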
# has the highest priority.
runtime_config = load_runtime_configuration()
CONFIGURATION_MAP.maps.append(runtime_config)

# User config is last in the map list because runtime
# overrides user configuration.  We do it this way because
# the runtime configuration might be generated by the
# CI system.  The user configuration is persisted and
# can have settings that stay with the machine.
user_config = load_user_configuration()
CONFIGURATION_MAP.maps.append(user_config)

# Activation Step - 5: Write the information into the context that is not persisted and is
#                      instance specific.  These variables are stored in the environment section
#                      of the configuration and overall global context.
env = ctx.lookup("/environment")

env["apod"] = AKIT_VARIABLES.AKIT_APOD_NAME

env["build"] = {
    "branch": AKIT_VARIABLES.AKIT_BUILD_BRANCH,
    "name": AKIT_VARIABLES.AKIT_BUILD_NAME,
    "flavor": AKIT_VARIABLES.AKIT_BUILD_FLAVOR
}

env["breakpoints"] = AKIT_VARIABLES.AKIT_BREAKPOINTS
env["debugger"] = AKIT_VARIABLES.AKIT_DEBUGGER
env["testroot"] = AKIT_VARIABLES.AKIT_TESTROOT

env["jobtype"] = AKIT_VARIABLES.AKIT_JOBTYPE
env["starttime"] = AKIT_VARIABLES.AKIT_STARTTIME
env["runid"] = AKIT_VARIABLES.AKIT_RUNID
def generic_test_entrypoint():
    """
        This is the generic test entry point for test modules.  It provides a standardized
        set of command-line parameters that can be used to run test files as scripts.

        .. note:: The `generic_test_entrypoint` is a useful tool to place at the bottom of
                  test files to allow them to easily be run for debugging purposes.
    """
    # We must exit with a result code, initialize it to 0 here
    result_code = 0

    base_parser = argparse.ArgumentParser()

    base_parser.add_argument("-i", "--include", dest="includes", action="append", default=[],
                             help="Add an include search statement.")
    base_parser.add_argument("-x", "--exclude", dest="excludes", action="append", default=[],
                             help="Add an exclude filter statement.")
    base_parser.add_argument("--console-level", dest="consolelevel", action="store", default="INFO",
                             choices=LOG_LEVEL_NAMES, help="The logging level for console output.")
    base_parser.add_argument("--logfile-level", dest="logfilelevel", action="store", default="DEBUG",
                             choices=LOG_LEVEL_NAMES, help="The logging level for logfile output.")

    test_module = sys.modules["__main__"]

    ctx = Context()
    env = ctx.lookup("/environment")

    # Set the jobtype
    env["jobtype"] = "testrun"

    test_results_dir = get_path_for_testresults()
    if not os.path.exists(test_results_dir):
        os.makedirs(test_results_dir)
    env["output_directory"] = test_results_dir

    test_root = find_testmodule_root(test_module)
    module_fullname = find_testmodule_fullname(test_module)

    # Copy the test module to the name of the module_fullname name so the loader won't reload it
    sys.modules[module_fullname] = test_module
    if test_module.__name__ == "__main__":
        test_module.__name__ = module_fullname

    # Re-map the object classes from the module over to the module name we just registered the test
    # module under.
    test_class_coll = inspect.getmembers(test_module, inspect.isclass)
    for testclass_name, testclass_obj in test_class_coll:
        tcobj_module_name = testclass_obj.__module__
        if tcobj_module_name == "__main__":
            testclass_obj.__module__ = module_fullname

    args = base_parser.parse_args()

    logging_initialize()

    includes = args.includes
    excludes = args.excludes
    if len(includes) == 0:
        includes.append("*")

    result_code = 0
    with DefaultTestJob(logger, test_root, includes=includes, excludes=excludes,
                        test_module=test_module) as tjob:
        result_code = tjob.execute()

    sys.exit(result_code)

    return
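# Example (hedged sketch): how a test module might use this entry point so it can be run
# directly as a script for debugging.  The import location of generic_test_entrypoint and
# the test content are assumptions for illustration; only placing the call at the bottom
# of a test file is described by the docstring above.
#
#     # file: test_example.py
#     from akit.testing.testplus import generic_test_entrypoint   # module path assumed
#
#     def test_something_basic():
#         assert True
#
#     if __name__ == "__main__":
#         generic_test_entrypoint()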
def _reinitialize_logging(consolelevel, logfilelevel, output_dir, logfile_basename, log_branches):
    """
        Helper method to re-initialize the logging when the path to the output directory changes
        shortly after startup of the framework.  This method also handles the configuration of
        output levels, stdout and stderr file wrappers.
    """
    consolelevel_strerr = logging.WARNING

    console_filter = LessThanRecordFilter(logging.WARNING)
    console_filter_stderr = None

    if isinstance(consolelevel, str):
        consolelevel_upper = consolelevel.upper()
        if consolelevel_upper == "QUIET":
            consolelevel = AKitLogLevels.QUIET
            consolelevel_strerr = AKitLogLevels.QUIET
            console_filter = LessThanRecordFilter(AKitLogLevels.QUIET)
            console_filter_stderr = LessThanRecordFilter(AKitLogLevels.QUIET)
        elif hasattr(logging, consolelevel_upper):
            consolelevel = getattr(logging, consolelevel_upper)
        else:
            consolelevel = logging.INFO
    else:
        print("")
        print("NOTE: Console logging set to %r" % consolelevel)
        print("NOTE: outputdir=%s" % output_dir)
        print("")
        consolelevel = logging.WARNING

    basecomp, extcomp = os.path.splitext(logfile_basename)

    ctx = Context()
    env = ctx.lookup("/environment")

    debug_logfilename = os.path.join(output_dir, basecomp + ".DEBUG" + extcomp)
    other_logfilename = os.path.join(output_dir, basecomp + ".OTHER" + extcomp)

    env["logfile_debug"] = debug_logfilename
    env["logfile_other"] = other_logfilename

    rel_logfilename = os.path.join(output_dir, basecomp + extcomp)

    logging.Logger.manager = LoggingManagerWrapper(logging.Logger.manager)

    # Remove all the log handlers from the root logger
    root_logger = logging.getLogger()
    for lhandler in root_logger.handlers:
        root_logger.removeHandler(lhandler)

    # Set the root logger to NOTSET, so we don't impose an effective log
    # level on child loggers
    root_logger.setLevel(logging.NOTSET)

    # Setup the debug logfile
    base_handler = LoggingDefaults.DefaultFileLoggingHandler(debug_logfilename)
    base_handler.setFormatter(AKitLogFormatter(DEFAULT_LOGFILE_FORMAT))
    base_handler.setLevel(logging.NOTSET)
    root_logger.addHandler(base_handler)

    # Setup the other log handler and other filter, we
    # need to add the other log handler before adding
    # the relevant log handler
    other_handler = LoggingDefaults.DefaultFileLoggingHandler(other_logfilename)
    other_handler.setFormatter(AKitLogFormatter(DEFAULT_LOGFILE_FORMAT))
    other_handler.setLevel(logfilelevel)
    for other_expr in OTHER_LOGGER_FILTERS:
        other_handler.addFilter(OtherFilter(other_expr))
    root_logger.addHandler(other_handler)

    # Setup the relevant log file which will get all the
    # log entries from loggers that satisfied a relevant
    # logger name prefix match
    rel_handler = LoggingDefaults.DefaultFileLoggingHandler(rel_logfilename)
    rel_handler.setFormatter(AKitLogFormatter(DEFAULT_LOGFILE_FORMAT))
    rel_handler.setLevel(logfilelevel)
    rel_handler.addFilter(RelevantFilter())
    root_logger.addHandler(rel_handler)

    # Setup the stdout logger with the correct console level and
    # filter the log entries from the stdout handler that are
    # greater than Info level
    stdout_logger = logging.StreamHandler(sys.stdout)
    stdout_logger.setLevel(consolelevel)
    stdout_logger.addFilter(console_filter)

    stderr_logger = logging.StreamHandler(sys.stderr)
    stderr_logger.setLevel(consolelevel_strerr)
    if console_filter_stderr is not None:
        stderr_logger.addFilter(console_filter_stderr)

    root_logger.addHandler(stdout_logger)
    root_logger.addHandler(stderr_logger)

    for binfo in log_branches:
        try:
            logger_name = binfo["name"]
            logfilename = binfo["logname"]
            log_level = binfo["loglevel"]

            logging_create_branch_logger(logger_name, logfilename, log_level)
        except Exception: # pylint: disable=broad-except
            errmsg = "Error configuring branch logger." + os.linesep
            errmsg += traceback.format_exc()
            root_logger.error(errmsg)

    akit_logger = getAutomatonKitLogger()
    akit_logger.section("Logging Initialized")

    return
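# Example (hedged sketch): the shape of the entries in 'log_branches' consumed by the loop
# above.  The keys 'name', 'logname' and 'loglevel' are read directly by the code; the
# concrete logger namespace, file name and level below are illustrative assumptions.
#
#     log_branches = [
#         {
#             "name": "akit.interop.upnp",   # logger namespace to branch off
#             "logname": "upnp.log",         # file the branch handler writes to
#             "loglevel": "DEBUG"
#         }
#     ]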
def command_akit_landscape_verify(credentials_file, landscape_file, runtime_file):
    # pylint: disable=unused-import,import-outside-toplevel

    # We do the imports of the automation framework code inside the action functions because
    # we don't want to start up logging and the processing of inputs and environment variables
    # until we have entered an action method.  That way we know how to set up the environment.

    # IMPORTANT: We need to load the context first because it will trigger the loading
    # of the default user configuration
    from akit.environment.context import Context
    from akit.environment.contextpaths import ContextPaths
    from akit.environment.variables import JOB_TYPES, AKIT_VARIABLES
    from akit.environment.optionoverrides import (override_config_credentials,
                                                  override_config_landscape,
                                                  override_config_runtime)

    ctx = Context()
    env = ctx.lookup("/environment")

    # We need to set the job type before we trigger activation.
    env["jobtype"] = JOB_TYPES.COMMAND

    # Activate the AutomationKit environment with characteristics of a console application
    import akit.activation.console

    from akit.xlogging.foundations import logging_initialize, getAutomatonKitLogger

    # Initialize logging
    logging_initialize()
    logger = getAutomatonKitLogger()

    if credentials_file is not None:
        override_config_credentials(credentials_file)

    if landscape_file is not None:
        override_config_landscape(landscape_file)

    if runtime_file is not None:
        override_config_runtime(runtime_file)

    from akit.interop.landscaping.landscape import Landscape, startup_landscape
    from akit.interop.landscaping.landscapedevice import LandscapeDevice

    lscape: Landscape = startup_landscape(include_ssh=True, include_upnp=True)

    upnp_device_configs = lscape.get_upnp_device_configs()
    if len(upnp_device_configs):
        print("======================= UPNP DEVICES =======================")

        for dev in upnp_device_configs:
            skip_dev = True if "skip" in dev and dev["skip"] else False

            upnp_info = dev["upnp"]
            usn = upnp_info["USN"]
            modelName = upnp_info["modelName"]
            modelNumber = upnp_info["modelNumber"]

            status = "Down"
            lscape_dev: LandscapeDevice = lscape.lookup_device_by_keyid(usn)
            if lscape_dev is not None:
                status = lscape_dev.verify_status()

            dev_info_lines = [
                " Model: {} - {}".format(modelName, modelNumber),
                " USN: {}".format(usn),
                " Skip: {}".format(skip_dev),
                " Status: {}".format(status)
            ]
            dev_info = os.linesep.join(dev_info_lines)
            print(dev_info)
            print("")

    ssh_devices_configs = lscape.get_ssh_device_configs(exclude_upnp=True)
    if len(ssh_devices_configs):
        print("======================= SSH DEVICES =======================")

        for dev in ssh_devices_configs:
            skip_dev = True if "skip" in dev and dev["skip"] else False

            host = dev["host"]

            status = "Down"
            lscape_dev: LandscapeDevice = lscape.lookup_device_by_keyid(host)
            if lscape_dev is not None:
                status = lscape_dev.verify_status()

            dev_info_lines = [
                " HOST: {}".format(host),
                " Status: {}".format(status)
            ]
            dev_info = os.linesep.join(dev_info_lines)
            print(dev_info)
            print("")

    return
def command_akit_testing_run(root, includes, excludes, output, start, runid, branch, build, flavor,
                             credentials_file, landscape_file, landscape_name, runtime_file, runtime_name,
                             topology_file, topology_name, console_level, logfile_level, debugger,
                             breakpoints, time_travel, timeportals, prerun_diagnostic, postrun_diagnostic):
    # pylint: disable=unused-import,import-outside-toplevel

    # We do the imports of the automation framework code inside the action functions because
    # we don't want to start up logging and the processing of inputs and environment variables
    # until we have entered an action method.  That way we know how to set up the environment.

    # IMPORTANT: We need to load the context first because it will trigger the loading
    # of the default user configuration
    from akit.environment.context import Context
    from akit.environment.contextpaths import ContextPaths
    from akit.environment.variables import extend_path, JOB_TYPES, AKIT_VARIABLES

    from akit.environment.optionoverrides import (
        override_build_branch,
        override_build_flavor,
        override_build_name,
        override_config_credentials,
        override_config_landscape,
        override_config_landscape_name,
        override_config_runtime,
        override_config_runtime_name,
        override_config_topology,
        override_config_topology_name,
        override_loglevel_console,
        override_loglevel_file,
        override_output_directory,
        override_runid,
        override_starttime,
        override_testroot,
        override_debug_breakpoints,
        override_debug_debugger,
        override_timetravel,
        override_timeportals)

    ctx = Context()
    env = ctx.lookup("/environment")

    # We need to set the job type before we trigger activation.
    env["jobtype"] = JOB_TYPES.TESTRUN

    # We perform activation a little later in the testrunner.py file so we can
    # handle exceptions in the context of testrunner_main function
    import akit.activation.testrun

    from akit.xlogging.foundations import logging_initialize, getAutomatonKitLogger

    if branch is not None:
        override_build_branch(branch)

    if build is not None:
        override_build_name(build)

    if flavor is not None:
        override_build_flavor(flavor)

    if credentials_file is not None:
        override_config_credentials(credentials_file)

    if landscape_file is not None and landscape_name is not None:
        errmsg = "The '--landscape-file' and '--landscape-name' options should not be used together."
        raise click.BadOptionUsage("landscape-name", errmsg)

    if landscape_file is not None:
        override_config_landscape(landscape_file)

    if landscape_name is not None:
        override_config_landscape_name(landscape_name)

    if landscape_file is not None or landscape_name is not None:
        landscape_filename = AKIT_VARIABLES.AKIT_CONFIG_LANDSCAPE
        option_name = "landscape" if landscape_file is not None else "landscape-name"
        if not os.path.exists(landscape_filename):
            errmsg = "The specified landscape file does not exist. filename={}".format(landscape_filename)
            raise click.BadOptionUsage(option_name, errmsg)

    if runtime_file is not None and runtime_name is not None:
        errmsg = "The '--runtime-file' and '--runtime-name' options should not be used together."
        raise click.BadOptionUsage("runtime-name", errmsg)

    if runtime_file is not None:
        override_config_runtime(runtime_file)

    if runtime_name is not None:
        override_config_runtime_name(runtime_name)

    if runtime_file is not None or runtime_name is not None:
        runtime_filename = AKIT_VARIABLES.AKIT_CONFIG_RUNTIME
        option_name = "runtime" if runtime_file is not None else "runtime-name"
        if not os.path.exists(runtime_filename):
            errmsg = "The specified runtime file does not exist. filename={}".format(runtime_filename)
            raise click.BadOptionUsage(option_name, errmsg)

    if topology_file is not None and topology_name is not None:
        errmsg = "The '--topology-file' and '--topology-name' options should not be used together."
        raise click.BadOptionUsage("topology-name", errmsg)

    if topology_file is not None:
        override_config_topology(topology_file)

    if topology_name is not None:
        override_config_topology_name(topology_name)

    if topology_file is not None or topology_name is not None:
        topology_filename = AKIT_VARIABLES.AKIT_CONFIG_TOPOLOGY
        option_name = "topology" if topology_file is not None else "topology-name"
        if not os.path.exists(topology_filename):
            errmsg = "The specified topology file does not exist. filename={}".format(topology_filename)
            raise click.BadOptionUsage(option_name, errmsg)

    if console_level is not None:
        override_loglevel_console(console_level)

    if logfile_level is not None:
        override_loglevel_file(logfile_level)

    if output is not None:
        override_output_directory(output)

    if start is not None:
        override_starttime(start)

    if runid is not None:
        override_runid(runid)

    # Process the commandline args here and then set the variables on the environment
    # as necessary.  We need to do this before we import activate.
    if breakpoints is not None:
        override_debug_breakpoints(breakpoints)

        # If a breakpoint was passed but the debugger was not, use 'debugpy' for the
        # default debugger.
        if debugger is None:
            override_debug_debugger('debugpy')

    if debugger is not None:
        override_debug_debugger(debugger)

    if time_travel is not None:
        override_timetravel(time_travel)

    if timeportals is not None:
        override_timeportals(timeportals)

    if prerun_diagnostic:
        ctx.insert("/environment/configuration/diagnostics/prerun-diagnostic", {})

    if postrun_diagnostic:
        ctx.insert("/environment/configuration/diagnostics/postrun-diagnostic", {})

    if root is None:
        if AKIT_VARIABLES.AKIT_TESTROOT is not None:
            root = AKIT_VARIABLES.AKIT_TESTROOT
        elif ctx.lookup(ContextPaths.TESTROOT) is not None:
            root = ctx.lookup(ContextPaths.TESTROOT)
        else:
            root = "."

    test_root = os.path.abspath(os.path.expandvars(os.path.expanduser(root)))
    if not os.path.isdir(test_root):
        errmsg = "The specified root folder does not exist. root=%s" % root
        if test_root != root:
            errmsg += " expanded=%s" % test_root
        raise click.BadParameter(errmsg)

    override_testroot(root)

    # Make sure we extend PATH to include the test root
    extend_path(test_root)

    # Initialize logging
    logging_initialize()
    logger = getAutomatonKitLogger()

    from akit.extensionpoints import AKitExtensionPoints
    akep = AKitExtensionPoints()

    # At this point in the code, we either lookup an existing test job or we create a test job
    # from the includes, excludes or test_module
    TestJobType = akep.get_testplus_default_job_type()

    result_code = 0
    with TestJobType(logger, test_root, includes=includes, excludes=excludes,
                     branch=branch, build=build, flavor=flavor) as tjob:
        result_code = tjob.execute()

    sys.exit(result_code)

    return
def command_akit_testing_query(root, includes, excludes, debug):
    # pylint: disable=unused-import,import-outside-toplevel

    # We do the imports of the automation framework code inside the action functions because
    # we don't want to start up logging and the processing of inputs and environment variables
    # until we have entered an action method.  That way we know how to set up the environment.

    # IMPORTANT: We need to load the context first because it will trigger the loading
    # of the default user configuration
    from akit.environment.context import Context
    from akit.environment.variables import extend_path, AKIT_VARIABLES

    ctx = Context()
    env = ctx.lookup("/environment")

    # Set the jobtype
    env["jobtype"] = "testrun"

    test_root = None
    if root is not None:
        AKIT_VARIABLES.AKIT_TESTROOT = root
    elif AKIT_VARIABLES.AKIT_TESTROOT is not None:
        root = AKIT_VARIABLES.AKIT_TESTROOT
    else:
        root = "."

    test_root = os.path.abspath(os.path.expandvars(os.path.expanduser(root)))
    if not os.path.isdir(test_root):
        errmsg = "The specified root folder does not exist. root=%s" % root
        if test_root != root:
            errmsg += " expanded=%s" % test_root
        raise click.BadParameter(errmsg)

    env["testroot"] = test_root

    # Make sure we extend PATH to include the test root
    extend_path(test_root)

    # We use console activation because all our input output is going through the terminal
    import akit.activation.console

    from akit.xlogging.foundations import logging_initialize, getAutomatonKitLogger

    # Initialize logging
    logging_initialize()
    logger = getAutomatonKitLogger()

    from akit.extensionpoints import AKitExtensionPoints
    akep = AKitExtensionPoints()

    # At this point in the code, we either lookup an existing test job or we create a test job
    # from the includes, excludes or test_module
    TestJobType = akep.get_testplus_default_job_type()

    result_code = 0
    with TestJobType(logger, test_root, includes=includes, excludes=excludes) as tjob:
        query_results = tjob.query()

        test_names = [tn for tn in query_results.keys()]
        test_names.sort()

        print()
        print("Tests:")
        for tname in test_names:
            tref = query_results[tname]
            print(" " + tname)

            param_names = [pn for pn in tref.subscriptions.keys()]
            param_names.sort()
            for pname in param_names:
                pinfo = tref.subscriptions[pname]
                print(" {}: {}".format(pname, pinfo.describe_source()))

        print()

        if len(tjob.import_errors) > 0:
            print("IMPORT ERRORS:", file=sys.stderr)
            for ifilename in tjob.import_errors:
                imperr_msg = ifilename
                print(" " + imperr_msg, file=sys.stderr)
            print("", file=sys.stderr)

    return