def pytest_sessionstart(session):
    # Purpose: at pytest session start, check whether a specific actor or a
    # shared library is under test (communicated via environment variables by
    # the snactor tooling) and prepare the leapp repository accordingly.
    actor_path = os.environ.get('LEAPP_TESTED_ACTOR', None)
    library_path = os.environ.get('LEAPP_TESTED_LIBRARY', None)
    if actor_path:
        # Scan and load the repository that contains the tested actor.
        repo = find_and_scan_repositories(find_repository_basedir(actor_path), include_locals=True)
        repo.load()
        actor = None
        # find which actor is being tested
        for a in repo.actors:
            if a.full_path == actor_path.rstrip('/'):
                actor = a
                break
        if not actor:
            # Unknown path - silently skip context injection.
            return
        # load actor context so libraries can be imported on module level
        session.leapp_repository = repo
        session.actor_context = actor.injected_context()
        # NOTE(review): the context is entered here but never explicitly
        # exited - presumably cleaned up at interpreter exit; confirm.
        session.actor_context.__enter__()
    elif library_path:
        repo = find_and_scan_repositories(
            find_repository_basedir(library_path), include_locals=True)
        repo.load()
        # chdir into the library so its relative paths resolve during tests
        os.chdir(library_path)
def cli(args):
    """Export leapp environment variables for this CLI invocation.

    Resolves the logger configuration file, the leapp configuration file and
    the debug/verbosity flags, and publishes each as an environment variable
    consumed by the rest of the framework.
    """
    if args.logger_config and os.path.isfile(args.logger_config):
        os.environ['LEAPP_LOGGER_CONFIG'] = args.logger_config

    # Configuration file resolution order:
    #   1. explicit --config argument
    #   2. <repository base dir>/.leapp/leapp.conf
    #   3. $LEAPP_CONFIG
    #   4. /etc/leapp/leapp.conf
    config_file_path = None
    if args.config and os.path.isfile(args.config):
        config_file_path = args.config
    if not config_file_path and find_repository_basedir('.'):
        config_file_path = os.path.join(find_repository_basedir('.'), '.leapp/leapp.conf')
    if not config_file_path or not os.path.isfile(config_file_path):
        config_file_path = os.environ.get('LEAPP_CONFIG')
    if not config_file_path or not os.path.isfile(config_file_path):
        config_file_path = '/etc/leapp/leapp.conf'
    os.environ['LEAPP_CONFIG'] = config_file_path

    # --debug forces debug mode on; otherwise keep whatever was inherited.
    if args.debug:
        os.environ['LEAPP_DEBUG'] = '1'
    else:
        os.environ['LEAPP_DEBUG'] = os.environ.get('LEAPP_DEBUG', '0')

    # Debug implies verbose.
    if os.environ['LEAPP_DEBUG'] == '1' or args.verbose:
        os.environ['LEAPP_VERBOSE'] = '1'
    else:
        os.environ['LEAPP_VERBOSE'] = os.environ.get('LEAPP_VERBOSE', '0')
def get_config():
    # Lazily build and cache the process-wide leapp configuration parser.
    global _LEAPP_CONFIG
    if not _LEAPP_CONFIG:
        repository_defaults = {}
        # When running inside a leapp repository, seed repository-specific
        # default sections before reading any config file.
        if find_repository_basedir(os.environ.get('LEAPP_CONFIG', '.')):
            repository_defaults['repository'] = {
                'root_dir': find_repository_basedir(os.environ.get('LEAPP_CONFIG', '.')),
                'state_dir': '${root_dir}/.leapp',
            }
            # Backwards compatibility for older repositories that still used the 'project' terminology.
            repository_defaults['project'] = repository_defaults['repository']
        _LEAPP_CONFIG = BetterConfigParser()
        # Apply the hard-coded defaults first, then the repository defaults;
        # later entries win when section names collide.
        for section, values in tuple(_CONFIG_DEFAULTS.items()) + tuple(
                repository_defaults.items()):
            if not _LEAPP_CONFIG.has_section(section):
                _LEAPP_CONFIG.add_section(section)
            for name, value in values.items():
                if value is not None:
                    _LEAPP_CONFIG.set(section, name, value)
        # Finally, values from the config file override the defaults above.
        _LEAPP_CONFIG.read(
            [os.getenv('LEAPP_CONFIG', '/etc/leapp/leapp.conf')])
    return _LEAPP_CONFIG
def cli(args):
    """Export leapp environment variables, including a logger config fallback.

    Like the basic environment setup, but additionally falls back to the
    repository-local ``.leapp/logger.conf`` when no explicit logger config
    was given.
    """
    if args.logger_config and os.path.isfile(args.logger_config):
        os.environ['LEAPP_LOGGER_CONFIG'] = args.logger_config
    # Consider using the in repository $REPOPATH/.leapp/logger.conf to actually obey --debug / --verbose
    # If /etc/leapp/logger.conf or $REPOPATH/.leapp/logger.conf don't exist logging won't work in snactor.
    elif find_repository_basedir('.') and os.path.isfile(
            os.path.join(find_repository_basedir('.'), '.leapp/logger.conf')):
        os.environ['LEAPP_LOGGER_CONFIG'] = os.path.join(
            find_repository_basedir('.'), '.leapp/logger.conf')

    # Resolution order: --config, repo .leapp/leapp.conf, $LEAPP_CONFIG,
    # then the system-wide /etc/leapp/leapp.conf.
    config_file_path = None
    if args.config and os.path.isfile(args.config):
        config_file_path = args.config
    if not config_file_path and find_repository_basedir('.'):
        config_file_path = os.path.join(find_repository_basedir('.'), '.leapp/leapp.conf')
    if not config_file_path or not os.path.isfile(config_file_path):
        config_file_path = os.environ.get('LEAPP_CONFIG')
    if not config_file_path or not os.path.isfile(config_file_path):
        config_file_path = '/etc/leapp/leapp.conf'
    os.environ['LEAPP_CONFIG'] = config_file_path

    if args.debug:
        os.environ['LEAPP_DEBUG'] = '1'
    else:
        os.environ['LEAPP_DEBUG'] = os.environ.get('LEAPP_DEBUG', '0')

    # Debug implies verbose.
    if os.environ['LEAPP_DEBUG'] == '1' or args.verbose:
        os.environ['LEAPP_VERBOSE'] = '1'
    else:
        os.environ['LEAPP_VERBOSE'] = os.environ.get('LEAPP_VERBOSE', '0')
def pytest_collectstart(collector):
    # For each collected node, make sure the leapp repository containing it
    # is loaded on the pytest session, and switch the injected actor context
    # whenever collection enters a different actor's directory.
    if collector.nodeid:
        current_repo_basedir = find_repository_basedir(collector.nodeid)
        # loading the current repo
        if (
            not hasattr(collector.session, "leapp_repository")
            or current_repo_basedir != collector.session.repo_base_dir
        ):
            repo = find_and_scan_repositories(
                find_repository_basedir(collector.nodeid), include_locals=True
            )
            repo.load(skip_actors_discovery=True)
            collector.session.leapp_repository = repo
            collector.session.repo_base_dir = current_repo_basedir

        # we're forcing the actor context switch only when traversing new
        # actor
        if "/actors/" in str(collector.fspath) and (
            not hasattr(collector.session, "current_actor_path")
            or collector.session.current_actor_path + os.sep
            not in str(collector.fspath)
        ):
            actor = None
            # Match the actor whose directory is the parent of this test dir.
            for a in collector.session.leapp_repository.actors:
                if a.full_path == collector.fspath.dirpath().dirname:
                    actor = a
                    break
            if not actor:
                logger.info("No actor found, exiting collection...")
                return
            # we need to tear down the context from the previous
            # actor
            try:
                collector.session.current_actor_context.__exit__(
                    None, None, None
                )
            except AttributeError:
                # First actor encountered in this session - nothing to tear down.
                pass
            else:
                logger.info(
                    "Actor %r context teardown complete",
                    collector.session.current_actor.name,
                )
            logger.info("Injecting actor context for %r", actor.name)
            collector.session.current_actor = actor
            collector.session.current_actor_context = actor.injected_context()
            collector.session.current_actor_context.__enter__()
            collector.session.current_actor_path = (
                collector.session.current_actor.full_path
            )
            logger.info("Actor %r context injected", actor.name)
def cli(args):
    """Run a single actor by name inside an in-process messaging context.

    Exits non-zero when the actor raises or reports errors. With
    --print-output the produced messages are dumped to stdout as JSON.
    """
    main_log = configure_logger()
    repo_root = find_repository_basedir('.')
    repo = find_and_scan_repositories(repo_root, include_locals=True)
    try:
        repo.load()
    except LeappError as exc:
        sys.stderr.write(exc.message)
        sys.stderr.write('\n')
        sys.exit(1)

    actor_logger = main_log.getChild('actors')
    target = repo.lookup_actor(args.actor_name)
    if not target:
        raise CommandError('Actor "{}" not found!'.format(args.actor_name))

    messaging = InProcessMessaging(stored=args.save_output)
    messaging.load(target.consumes)

    failed = False
    with beautify_actor_exception():
        try:
            target(messaging=messaging, logger=actor_logger).run()
        except BaseException:
            failed = True
            raise

    report_errors(messaging.errors())
    if failed or messaging.errors():
        sys.exit(1)
    if args.print_output:
        json.dump(messaging.messages(), sys.stdout, indent=2)
        sys.stdout.write('\n')
def cli(args):
    """Create a new topic definition file under <repo>/topics/.

    The generated file defines a Topic subclass named after the given topic
    name; refuses to overwrite an existing file.

    :raises CommandError: when the target file already exists
    """
    topic_name = args.topic_name
    basedir = find_repository_basedir('.')
    basedir = os.path.join(basedir, 'topics')
    if not os.path.isdir(basedir):
        os.mkdir(basedir)
    topic_path = os.path.join(basedir, topic_name.lower() + '.py')
    if os.path.exists(topic_path):
        raise CommandError("File already exists: {}".format(topic_path))
    # FIX: the original recomputed topic_path here with an identical duplicate
    # statement - removed the redundant assignment.
    topic_class_name = make_class_name(topic_name)
    if not topic_class_name.endswith('Topic'):
        topic_class_name += 'Topic'
    with open(topic_path, 'w') as f:
        f.write('''from leapp.topics import Topic


class {topic_name}(Topic):
    name = '{topic}'
'''.format(topic_name=topic_class_name, topic=make_name(topic_name)))
    sys.stdout.write("New topic {} has been created in {}\n".format(topic_class_name, os.path.realpath(topic_path)))
def cli(args):
    """List the content of the current repository.

    Prints actors, models, tags, topics and workflows either as a human
    readable listing or, with --json, as a JSON document on stdout.
    """
    base_dir = find_repository_basedir('.')
    repository = find_and_scan_repositories(base_dir, include_locals=True)
    try:
        repository.load()
    except LeappError as exc:
        sys.stderr.write(exc.message)
        # FIX: terminate the error message with a newline, consistent with the
        # other commands, so the shell prompt is not glued to the message.
        sys.stderr.write('\n')
        sys.exit(1)
    # FIX: plain list() instead of a redundant identity comprehension.
    actors = list(repository.actors)
    # Only entities local to this repository (or all, with --all) are listed.
    topics = [
        topic for topic in get_topics()
        if _is_local(repository, topic, base_dir, all_repos=args.all)
    ]
    models = [
        model for model in get_models()
        if _is_local(repository, model, base_dir, all_repos=args.all)
    ]
    tags = [
        tag for tag in get_tags()
        if _is_local(repository, tag, base_dir, all_repos=args.all)
    ]
    workflows = [
        workflow for workflow in get_workflows()
        if _is_local(repository, workflow, base_dir, all_repos=args.all)
    ]
    if not args.json:
        sys.stdout.write(
            'Repository:\n Name: {repository}\n Path: {base_dir}\n\n'.format(
                repository=get_repository_name(base_dir), base_dir=base_dir))
        _print_group('Actors', actors, name_resolver=lambda x: x.class_name,
                     path_resolver=_get_actor_path)
        _print_group('Models', models)
        _print_group('Tags', tags)
        _print_group('Topics', topics)
        _print_group('Workflows', workflows)
    else:
        output = {
            'repository': get_repository_name(base_dir),
            'base_dir': base_dir,
            'topics': dict((topic.__name__, _get_topic_details(topic)) for topic in topics),
            'models': dict((model.__name__, _get_model_details(model)) for model in models),
            'actors': dict((actor.class_name, _get_actor_details(actor)) for actor in actors),
            'tags': dict((tag.name, _get_tag_details(tag)) for tag in tags),
            'workflows': dict((workflow.__name__, _get_workflow_details(workflow)) for workflow in workflows)
        }
        json_mod.dump(output, sys.stdout, indent=2)
def impl(context=None):
    """Execute the workflow selected via ``params`` in the given context.

    Experimental actors whitelisted in ``params.whitelist_experimental`` are
    enabled before the run; errors collected by the workflow are reported.
    """
    configure_logger()
    repo = find_and_scan_repositories(find_repository_basedir('.'), include_locals=True)
    try:
        repo.load()
    except LeappError as exc:
        sys.stderr.write(exc.message)
        sys.stderr.write('\n')
        sys.exit(1)

    workflow_cls = repo.lookup_workflow(params.name)
    if not workflow_cls:
        raise CommandError('Could not find any workflow named "{}"'.format(
            params.name))

    workflow = workflow_cls()
    # Enable any requested experimental actors before running.
    for actor_name in params.whitelist_experimental or ():
        experimental = repo.lookup_actor(actor_name)
        if experimental:
            workflow.whitelist_experimental_actor(experimental)

    with beautify_actor_exception():
        workflow.run(context=context, until_phase=params.until_phase,
                     until_actor=params.until_actor)
    report_errors(workflow.errors)
def cli(args):
    """Create a skeleton model file under <repo>/models/.

    When --topic is given, the generated model imports and uses that topic;
    otherwise a TODO placeholder is emitted. Refuses to overwrite an existing
    file.
    """
    model_name = args.model_name
    models_dir = os.path.join(find_repository_basedir('.'), 'models')
    if not os.path.isdir(models_dir):
        os.mkdir(models_dir)
    model_path = os.path.join(models_dir, model_name.lower() + '.py')
    if os.path.exists(model_path):
        raise CommandError("File already exists: {}".format(model_path))

    if args.topic:
        topic_usage = args.topic
        topic_import = 'from leapp.topics import {}\n'.format(args.topic)
    else:
        topic_usage = 'None # TODO: import appropriate topic and set it here'
        topic_import = ''

    with open(model_path, 'w') as f:
        f.write('''from leapp.models import Model, fields
{topic_import}

class {model_name}(Model):
    topic = {topic_usage}
'''.format(model_name=make_class_name(model_name), topic_import=topic_import,
           topic_usage=topic_usage))
    sys.stdout.write("New model {} has been created in {}\n".format(
        make_class_name(model_name), os.path.realpath(model_path)))
def cli(args):
    """Create a skeleton actor (actor.py plus an empty tests/ dir).

    Builds the import lines and the consumes/produces/tags tuples from the
    command-line arguments, creates the actors/<name>/tests directory tree
    and writes the actor module. Returns the actor's directory path.
    """
    actor_name = args.actor_name
    repo_base = find_repository_basedir('.')

    tag_imports = ''
    if args.tag:
        # Import only the class part before any dotted suffix.
        tag_imports = '\nfrom leapp.tags import {}'.format(
            ', '.join(t.split('.')[0] for t in args.tag))

    model_imports = ''
    if args.consumes or args.produces:
        models = set((args.produces or []) + (args.consumes or []))
        model_imports = '\nfrom leapp.models import {}'.format(', '.join(models))

    tags_content = '({})'.format(as_quoted_tuple(args.tag))
    consumes_content = '({})'.format(as_quoted_tuple(args.consumes))
    produces_content = '({})'.format(as_quoted_tuple(args.produces))

    # Create actors/<name>/tests as needed.
    actors_dir = os.path.join(repo_base, 'actors')
    if not os.path.isdir(actors_dir):
        os.mkdir(actors_dir)
    actor_dir = os.path.join(actors_dir, actor_name.lower())
    if not os.path.isdir(actor_dir):
        os.mkdir(actor_dir)
    tests_dir = os.path.join(actor_dir, 'tests')
    if not os.path.isdir(tests_dir):
        os.mkdir(tests_dir)

    actor_path = os.path.join(actor_dir, 'actor.py')
    if os.path.exists(actor_path):
        raise CommandError("File already exists: {}".format(actor_path))

    with open(actor_path, 'w') as f:
        f.write('''from leapp.actors import Actor{model_imports}{tag_imports}


class {actor_class}(Actor):
    name = '{actor_name}'
    description = 'No description has been provided for the {actor_name} actor.'
    consumes = {consumes_content}
    produces = {produces_content}
    tags = {tags_content}

    def process(self):
        pass
'''.format(actor_class=make_class_name(actor_name),
           actor_name=make_name(actor_name),
           tags_content=tags_content,
           produces_content=produces_content,
           consumes_content=consumes_content,
           model_imports=model_imports,
           tag_imports=tag_imports))
    sys.stdout.write("New actor {} has been created at {}\n".format(
        make_class_name(actor_name), os.path.realpath(actor_path)))
    return os.path.dirname(os.path.realpath(actor_path))
def pytest_collectstart(collector):
    # Multi-repository variant: keep one RepositoryManager on the pytest
    # session, register every repository encountered during collection, and
    # switch the injected actor context whenever collection enters a
    # different actor's directory.
    if collector.nodeid:
        current_repo_basedir = find_repository_basedir(str(collector.fspath))
        if not current_repo_basedir:
            # This is not a repository
            return
        if not hasattr(collector.session, "leapp_repository"):
            # First repository seen - create the manager and load it.
            collector.session.leapp_repository = RepositoryManager()
            collector.session.repo_base_dir = current_repo_basedir
            _load_and_add_repo(collector.session.leapp_repository, current_repo_basedir)
        else:
            # Add this repository only if the manager does not know it yet.
            if not collector.session.leapp_repository.repo_by_id(
                    get_repository_id(current_repo_basedir)):
                _load_and_add_repo(collector.session.leapp_repository, current_repo_basedir)

        # we're forcing the actor context switch only when traversing new
        # actor
        if "/actors/" in str(collector.fspath) and (
                not hasattr(collector.session, "current_actor_path")
                or collector.session.current_actor_path + os.sep not in str(
                    collector.fspath)):
            actor = None
            # Match the actor whose directory is the parent of this test dir.
            for a in collector.session.leapp_repository.actors:
                if a.full_path == collector.fspath.dirpath().dirname:
                    actor = a
                    break
            if not actor:
                logger.info("No actor found, exiting collection...")
                return
            # we need to tear down the context from the previous
            # actor
            try:
                collector.session.current_actor_context.__exit__(
                    None, None, None)
            except AttributeError:
                # First actor in this session - nothing to tear down.
                pass
            else:
                logger.info(
                    "Actor %r context teardown complete",
                    collector.session.current_actor.name,
                )
            logger.info("Injecting actor context for %r", actor.name)
            collector.session.current_actor = actor
            collector.session.current_actor_context = actor.injected_context()
            collector.session.current_actor_context.__enter__()
            collector.session.current_actor_path = (
                collector.session.current_actor.full_path)
            logger.info("Actor %r context injected", actor.name)
def load_repo(path):
    """
    Load the leapp repository that contains *path*, on demand.

    Imports are deferred to call time so that parts of the code base can be
    tested without a fully initialized leapp installation or environment.
    """
    from leapp.utils.repository import find_repository_basedir
    from leapp.repository.scan import find_and_scan_repositories

    repository = find_and_scan_repositories(find_repository_basedir(path),
                                            include_locals=True)
    repository.load()
    return repository
def cli(args):
    # Create a new workflow skeleton: first a matching tag (delegated to
    # `snactor new-tag`), then a workflow module under <repo>/workflows/.
    class_name = args.class_name
    short_name = args.short_name
    name = args.name
    base_dir = find_repository_basedir('.')
    workflows_dir = os.path.join(base_dir, 'workflows')
    # Derive any names the user did not supply from the workflow name.
    class_name = class_name or make_class_name(name)
    short_name = short_name or make_name(name)
    if not class_name.endswith('Workflow'):
        class_name += 'Workflow'
    # Every workflow needs a tag of the same class name; create it first.
    check_call(['snactor', 'new-tag', class_name])
    if not os.path.exists(workflows_dir):
        os.mkdir(workflows_dir)
    workflow_path = os.path.join(workflows_dir, make_name(name) + '.py')
    # Never overwrite an existing workflow module.
    if not os.path.exists(workflow_path):
        with open(workflow_path, 'w') as f:
            f.write("""from leapp.workflows import Workflow
from leapp.workflows.phases import Phase
from leapp.workflows.flags import Flags
from leapp.workflows.tagfilters import TagFilter
from leapp.workflows.policies import Policies
from leapp.tags import {workflow_class}Tag


class {workflow_class}(Workflow):
    name = '{workflow_name}'
    tag = {workflow_class}Tag
    short_name = '{workflow_short_name}'
    description = '''No description has been provided for the {workflow_name} workflow.'''

    # Template for phase definition - The order in which the phase classes are defined
    # within the Workflow class represents the execution order
    #
    # class PhaseName(Phase):
    #     name = 'phase_name'
    #     filter = TagFilter(PhaseTag)
    #     policies = Policies(Policies.Errors.FailPhase,
    #                         Policies.Retry.Phase)
    #     flags = Flags()
""".format(workflow_name=name, workflow_class=class_name,
           workflow_short_name=short_name))
    sys.stdout.write("New workflow {} has been created in {}\n".format(
        class_name, os.path.realpath(workflow_path)))
def cli(params):
    """Run the named workflow, optionally stopping at a phase or actor.

    :raises CommandError: when no workflow with the given name exists
    """
    configure_logger()
    repository = find_and_scan_repositories(find_repository_basedir('.'), include_locals=True)
    try:
        repository.load()
    except LeappError as exc:
        sys.stderr.write(exc.message)
        # FIX: terminate the error message with a newline, consistent with the
        # other commands in this tool.
        sys.stderr.write('\n')
        sys.exit(1)
    wf = repository.lookup_workflow(params.name)
    if not wf:
        raise CommandError('Could not find any workflow named "{}"'.format(params.name))
    instance = wf()
    instance.run(until_phase=params.until_phase, until_actor=params.until_actor)
    report_errors(instance.errors)
def cli(args):
    """Create a skeleton tag definition under <repo>/tags/.

    Refuses to overwrite an existing file; prints the created path on success.
    """
    tags_dir = os.path.join(find_repository_basedir('.'), 'tags')
    if not os.path.isdir(tags_dir):
        os.mkdir(tags_dir)
    tag_path = os.path.join(tags_dir, args.tag_name.lower() + '.py')
    if os.path.exists(tag_path):
        raise CommandError("File already exists: {}".format(tag_path))
    with open(tag_path, 'w') as f:
        f.write('''from leapp.tags import Tag


class {tag_name}Tag(Tag):
    name = '{tag}'
'''.format(tag_name=make_class_name(args.tag_name), tag=make_name(args.tag_name)))
    sys.stdout.write("New tag {} has been created in {}\n".format(
        make_class_name(args.tag_name), os.path.realpath(tag_path)))
def cli(args):
    """Run a single actor by name and report the errors it produced.

    With --print-output the messages produced by the actor are dumped to
    stdout as JSON.
    """
    log = configure_logger()
    basedir = find_repository_basedir('.')
    repository = find_and_scan_repositories(basedir, include_locals=True)
    try:
        repository.load()
    except LeappError as exc:
        sys.stderr.write(exc.message)
        # FIX: terminate the error message with a newline, consistent with
        # the other commands.
        sys.stderr.write('\n')
        sys.exit(1)
    actor_logger = log.getChild('actors')
    actor = repository.lookup_actor(args.actor_name)
    # FIX: lookup_actor returns None for an unknown name; the original then
    # crashed with AttributeError on actor.consumes. Fail with a clear
    # message and a non-zero exit instead.
    if not actor:
        sys.stderr.write('Actor "{}" not found!\n'.format(args.actor_name))
        sys.exit(1)
    messaging = InProcessMessaging(stored=args.save_output)
    messaging.load(actor.consumes)
    actor(messaging=messaging, logger=actor_logger).run()
    report_errors(messaging.errors())
    if args.print_output:
        json.dump(messaging.messages(), sys.stdout, indent=2)
        sys.stdout.write('\n')
def cli(args):
    """Run a single actor, reporting errors and deprecation usage.

    Supports an optional actor configuration model (--actor-config), storing
    produced messages (--save-output) and dumping them as JSON
    (--print-output). Exits non-zero on failure or reported errors.
    """
    started_at = datetime.datetime.utcnow()
    main_log = configure_logger()
    repo_root = find_repository_basedir('.')
    repo = find_and_scan_repositories(repo_root, include_locals=True)
    try:
        repo.load()
    except LeappError as exc:
        sys.stderr.write(exc.message)
        sys.stderr.write('\n')
        sys.exit(1)

    actor_logger = main_log.getChild('actors')
    target = repo.lookup_actor(args.actor_name)
    if not target:
        raise CommandError('Actor "{}" not found!'.format(args.actor_name))

    # Resolve the optional configuration model by name from leapp.models.
    if args.actor_config:
        config_model = getattr(import_module('leapp.models'), args.actor_config)
    else:
        config_model = None

    messaging = InProcessMessaging(stored=args.save_output, config_model=config_model)
    messaging.load(target.consumes)

    failed = False
    with beautify_actor_exception():
        try:
            target(messaging=messaging, logger=actor_logger,
                   config_model=config_model).run()
        except BaseException:
            failed = True
            raise

    report_errors(messaging.errors())
    report_deprecations(os.getenv('LEAPP_EXECUTION_ID'), start=started_at)
    if failed or messaging.errors():
        sys.exit(1)
    if args.print_output:
        json.dump(messaging.messages(), sys.stdout, indent=2)
        sys.stdout.write('\n')
def cli(params):
    """Sanity-check a workflow: fail if any initial message is also produced.

    A message type that is both an input (initial) and an output of the same
    workflow indicates a broken workflow definition.
    """
    configure_logger()
    repo = find_and_scan_repositories(find_repository_basedir('.'), include_locals=True)
    try:
        repo.load()
    except LeappError as exc:
        sys.stderr.write(exc.message)
        sys.stderr.write('\n')
        sys.exit(1)

    workflow_cls = repo.lookup_workflow(params.name)
    if not workflow_cls:
        raise CommandError('Could not find any workflow named "{}"'.format(
            params.name))

    workflow = workflow_cls()
    produced_late = set(workflow.initial) & set(workflow.produces)
    if produced_late:
        print_fail(
            _DESCRIPTION.format(' '.join(m.__name__ for m in produced_late)))
        sys.exit(1)
def loaded_leapp_repository(request):
    """
    This fixture will ensure that the repository for the current test run is
    loaded with all its links etc.

    This enables running actors and using models, tags, topics, workflows etc.

    Additionally loaded_leapp_repository gives you access to a
    :py:class:`leapp.repository.manager.RepositoryManager` instance.

    :Example:

    .. code-block:: python

        from leapp.snactor.fixture import loaded_leapp_repository
        from leapp.models import ExampleModel, ProcessedExampleModel

        def my_repository_library_test(loaded_leapp_repository):
            from leapp.libraries.common import global
            e = ExampleModel(value='Some string')
            result = global.process_function(e)
            assert type(result) is ProcessedExampleModel

    """
    repository_path = find_repository_basedir(request.module.__file__)
    # Point leapp at the repository's own config and set a hostname so code
    # reading these environment variables behaves as in a real run.
    os.environ['LEAPP_CONFIG'] = os.path.join(repository_path, '.leapp', 'leapp.conf')
    os.environ['LEAPP_HOSTNAME'] = socket.getfqdn()
    # Record a synthetic execution in the audit database so messages produced
    # during the test run are tied to a valid execution id.
    context = str(uuid.uuid4())
    with get_connection(None):
        Execution(context=context, kind='snactor-test-run', configuration='').store()
    os.environ["LEAPP_EXECUTION_ID"] = context
    # Reuse the repository manager cached on the pytest session, if any,
    # instead of re-scanning the repository for every test.
    manager = getattr(request.session, 'leapp_repository', None)
    if not manager:
        manager = find_and_scan_repositories(repository_path, include_locals=True)
        manager.load(resolve=True)
    yield manager
def test_find_repository_basedir(repository_dir):
    """The repo root is found from nested dirs; None is returned outside a repo."""
    deepest = repository_dir.mkdir('a').mkdir('b').mkdir('c')
    # Resolution walks up from an arbitrarily nested directory...
    assert repository_dir.samefile(find_repository_basedir(deepest.strpath))
    # ...and is a fixed point when already at the base directory.
    assert repository_dir.samefile(find_repository_basedir(repository_dir.strpath))
    # Outside of any repository, nothing is found.
    assert find_repository_basedir('.') is None
def register_repo(args):
    """Register the repository containing the current directory with leapp."""
    repo_root = find_repository_basedir('.')
    if repo_root:
        register_path(repo_root)
        print('Repository successfully registered')
def cli(args):
    # Discover and print the content (actors, models, tags, topics,
    # workflows) of the repository containing the current directory, either
    # human readable, as JSON (--json) or without importing repository code
    # at all (--safe).
    logging.basicConfig(level=logging.WARNING, stream=sys.stderr)
    base_dir = find_repository_basedir('.')
    if args.safe and args.json:
        sys.stderr.write(
            'The options --safe and --json are currently mutually exclusive\n')
        sys.exit(1)
    if args.safe:
        # Safe mode prints the listing without loading/executing repo code.
        sys.stdout.write(
            'Repository:\n Name: {repository}\n Path: {base_dir}\n\n'.format(
                repository=get_repository_name(base_dir), base_dir=base_dir))
        safe_discover(base_dir)
        sys.exit(0)
    repository = find_and_scan_repositories(base_dir, include_locals=True)
    try:
        repository.load()
    except LeappError as exc:
        sys.stderr.write(exc.message)
        sys.stderr.write('\n')
        sys.exit(1)
    actors = repository.actors
    # Only entities local to this repository (or all, with --all) are listed.
    topics = [
        topic for topic in get_topics()
        if _is_local(repository, topic, base_dir, all_repos=args.all)
    ]
    models = [
        model for model in get_models()
        if _is_local(repository, model, base_dir, all_repos=args.all)
    ]
    tags = [
        tag for tag in get_tags()
        if _is_local(repository, tag, base_dir, all_repos=args.all)
    ]
    workflows = [
        workflow for workflow in get_workflows()
        if _is_local(repository, workflow, base_dir, all_repos=args.all)
    ]
    if not args.json:
        sys.stdout.write(
            'Repository:\n Name: {repository}\n Path: {base_dir}\n\n'.format(
                repository=get_repository_name(base_dir), base_dir=base_dir))
        _print_group('Actors', actors, name_resolver=lambda x: x.class_name,
                     path_resolver=_get_actor_path)
        _print_group('Models', models)
        _print_group('Tags', tags)
        _print_group('Topics', topics)
        _print_group('Workflows', workflows)
    else:
        output = {
            'repository': get_repository_name(base_dir),
            'base_dir': base_dir,
            'topics': dict((topic.__name__, _get_topic_details(topic)) for topic in topics),
            'models': dict((model.__name__, _get_model_details(model)) for model in models),
            'actors': dict((actor.class_name, _get_actor_details(actor)) for actor in actors),
            'tags': dict((tag.name, _get_tag_details(tag)) for tag in tags),
            'workflows': dict((workflow.__name__, _get_workflow_details(workflow)) for workflow in workflows)
        }
        json.dump(output, sys.stdout,
                  indent=2)
        sys.stdout.write('\n')
def register_repo(args):  # noqa; pylint: disable=unused-argument
    """Register the repository containing the current directory with leapp."""
    repo_root = find_repository_basedir('.')
    if repo_root:
        register_path(repo_root)
        print('Repository successfully registered')
def _get_class_file(cls, repository_relative=True):
    """Return the source-file path of *cls*'s module.

    The path is made relative to the repository base directory by default,
    or to the current working directory when repository_relative is False.
    Compiled '.pyc' paths are mapped back to their '.py' source.
    """
    module_file = sys.modules[cls.__module__].__file__.replace('.pyc', '.py')
    source_path = os.path.abspath(module_file)
    anchor = find_repository_basedir('.') if repository_relative else os.getcwd()
    return os.path.relpath(source_path, anchor)
def _get_actor_path(actor, repository_relative=True):
    """Return the actor's directory path.

    Relative to the repository base directory by default, or to the current
    working directory when repository_relative is False.
    """
    anchor = find_repository_basedir('.') if repository_relative else os.getcwd()
    return os.path.relpath(actor.directory, anchor)
from __future__ import print_function import json from leapp.utils.repository import find_repository_basedir from leapp.repository.scan import find_and_scan_repositories base_dir = find_repository_basedir('.') repository = find_and_scan_repositories(base_dir, include_locals=True) repository.load() if not hasattr(repository, 'repos'): repository.repos = [repository] print(json.dumps([repo.serialize() for repo in repository.repos]))