def execute(self, args):  # pylint: disable=no-self-use,too-many-branches,too-many-statements
    loader = ExtensionLoader(packages=settings.extension_packages,
                             paths=settings.extension_paths)
    agenda = OrderedDict()
    agenda['config'] = OrderedDict(instrumentation=[], result_processors=[])
    agenda['global'] = OrderedDict(iterations=args.iterations)
    agenda['workloads'] = []
    device = None
    device_config = None

    for name in args.extensions:
        extcls = loader.get_extension_class(name)
        config = loader.get_default_config(name)
        del config['modules']

        if extcls.kind == 'workload':
            entry = OrderedDict()
            entry['name'] = extcls.name
            if name != extcls.name:
                entry['label'] = name
            entry['params'] = config
            agenda['workloads'].append(entry)
        elif extcls.kind == 'device':
            if device is not None:
                raise ConfigError('Specifying multiple devices: {} and {}'.format(device.name, name))
            device = extcls
            device_config = config
            agenda['config']['device'] = name
            agenda['config']['device_config'] = config
        else:
            if extcls.kind == 'instrument':
                agenda['config']['instrumentation'].append(name)
            if extcls.kind == 'result_processor':
                agenda['config']['result_processors'].append(name)
            agenda['config'][name] = config

    if args.include_runtime_params:
        if not device:
            if settings.device:
                device = loader.get_extension_class(settings.device)
                device_config = loader.get_default_config(settings.device)
            else:
                raise ConfigError('The -r option requires a device to be in the list of extensions')
        rps = OrderedDict()
        for rp in device.runtime_parameters:
            if hasattr(rp, 'get_runtime_parameters'):
                # a core parameter needs to be expanded for each of the
                # device's cores, if they're available
                for crp in rp.get_runtime_parameters(device_config.get('core_names', [])):
                    rps[crp.name] = None
            else:
                rps[rp.name] = None
        agenda['global']['runtime_params'] = rps

    if args.output:
        wfh = open(args.output, 'w')
    else:
        wfh = sys.stdout
    yaml.dump(agenda, wfh, indent=4, default_flow_style=False)
    if args.output:
        wfh.close()
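# Illustrative sketch (not from the original source) of the agenda-generation
# pattern above, reduced to its essentials: look up an extension class, take
# its default config, and dump an agenda skeleton as YAML. The import path is
# an assumption for a typical WA2 install; the OrderedDict representer is added
# here only so the sketch emits plain mappings when run stand-alone.
import sys
from collections import OrderedDict

import yaml

from wlauto.core.extension_loader import ExtensionLoader  # assumed import path

yaml.add_representer(OrderedDict,
                     lambda dumper, data: dumper.represent_dict(data.items()))

loader = ExtensionLoader()                 # default packages and search paths
name = loader.list_workloads()[0].name     # just pick the first discovered workload
extcls = loader.get_extension_class(name)
config = loader.get_default_config(name)

agenda = OrderedDict()
agenda['config'] = OrderedDict(instrumentation=[], result_processors=[])
agenda['global'] = OrderedDict(iterations=1)
agenda['workloads'] = [OrderedDict(name=extcls.name, params=config)]

yaml.dump(agenda, sys.stdout, indent=4, default_flow_style=False)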
def execute(self, args):
    self.validate_args(args)
    self.logger.info("Connecting to device...")
    ext_loader = ExtensionLoader(packages=settings.extension_packages,
                                 paths=settings.extension_paths)

    # Setup config
    self.config = RunConfiguration(ext_loader)
    for filepath in settings.get_config_paths():
        self.config.load_config(filepath)
    self.config.set_agenda(Agenda())
    self.config.finalize()
    context = LightContext(self.config)

    # Setup device
    self.device = ext_loader.get_device(settings.device, **settings.device_config)
    self.device.validate()
    self.device.dynamic_modules = []
    self.device.connect()
    self.device.initialize(context)

    host_binary = context.resolver.get(Executable(NO_ONE, self.device.abi, 'revent'))
    self.target_binary = self.device.install_executable(host_binary)

    self.run(args)
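# Illustrative sketch (not from the original source) of the device bring-up
# pattern used above: load the device extension, validate it, and connect.
# The import path and the device name 'generic_android' are assumptions;
# substitute the device actually configured in your WA installation.
from wlauto.core.extension_loader import ExtensionLoader  # assumed import path

ext_loader = ExtensionLoader()                      # default packages and search paths
device = ext_loader.get_device('generic_android')   # hypothetical device name
device.validate()
device.connect()
print('Connected; ABI: {}'.format(device.abi))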
def execute(self, args):
    filters = {}
    if args.name:
        filters["name"] = args.name

    ext_loader = ExtensionLoader(packages=settings.extension_packages,
                                 paths=settings.extension_paths)
    results = ext_loader.list_extensions(args.kind[:-1])
    if filters or args.platform:
        filtered_results = []
        for result in results:
            passed = True
            for k, v in filters.iteritems():
                if getattr(result, k) != v:
                    passed = False
                    break
            if passed and args.platform:
                passed = check_platform(result, args.platform)
            if passed:
                filtered_results.append(result)
    else:  # no filters specified
        filtered_results = results

    if filtered_results:
        output = DescriptionListFormatter()
        for result in sorted(filtered_results, key=lambda x: x.name):
            output.add_item(get_summary(result), result.name)
        print output.format_data()
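# Illustrative sketch (not from the original source): the filtering loop above
# is a generic "match all attribute filters, then apply an extra check" test.
# A dependency-free restatement of that logic; the helper name, the fake
# extension class, and the platform check are made up for illustration and
# stand in for check_platform() from the original.
def matches(obj, filters, extra_check=None):
    for attr, wanted in filters.items():
        if getattr(obj, attr, None) != wanted:
            return False
    if extra_check is not None:
        return extra_check(obj)
    return True


class _FakeExtension(object):
    def __init__(self, name, platform):
        self.name, self.platform = name, platform


exts = [_FakeExtension('dhrystone', 'android'), _FakeExtension('memcpy', 'linux')]
print([e.name for e in exts if matches(e, {'platform': 'android'})])
# -> ['dhrystone']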
def create_workload(name, kind='basic', where='local', check_name=True, **kwargs):
    if check_name:
        extloader = ExtensionLoader(packages=settings.extension_packages,
                                    paths=settings.extension_paths)
        if name in [wl.name for wl in extloader.list_workloads()]:
            raise CommandError('Workload with name "{}" already exists.'.format(name))

    class_name = get_class_name(name)
    if where == 'local':
        workload_dir = _d(os.path.join(settings.environment_root, 'workloads', name))
    else:
        workload_dir = _d(os.path.join(where, name))

    if kind == 'basic':
        create_basic_workload(workload_dir, name, class_name, **kwargs)
    elif kind == 'uiauto':
        create_uiautomator_workload(workload_dir, name, class_name, **kwargs)
    elif kind == 'android':
        create_android_benchmark(workload_dir, name, class_name, **kwargs)
    elif kind == 'android_uiauto':
        create_android_uiauto_benchmark(workload_dir, name, class_name, **kwargs)
    else:
        raise CommandError('Unknown workload type: {}'.format(kind))

    print 'Workload created in {}'.format(workload_dir)
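# Illustrative use of create_workload() above (not from the original source).
# The workload name and target directory are made up; 'where' may also be left
# as 'local' to create the workload under settings.environment_root.
create_workload('my_benchmark', kind='uiauto', where='/tmp/wa_workloads')
# Prints roughly "Workload created in /tmp/wa_workloads/my_benchmark", and
# raises CommandError if a workload with that name already exists
# (when check_name=True, the default).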
def init_resources(self, context):
    super(Applaunch, self).init_resources(context)
    loader = ExtensionLoader(packages=settings.extension_packages,
                             paths=settings.extension_paths)
    self.workload_params['markers_enabled'] = True
    self.workload = loader.get_workload(self.workload_name, self.device,
                                        **self.workload_params)
    self.init_workload_resources(context)
    self.package = self.workload.package
def init_resources(self, context):
    super(Applaunch, self).init_resources(context)
    loader = ExtensionLoader(packages=settings.extension_packages,
                             paths=settings.extension_paths)
    self.workload_params['markers_enabled'] = True
    self.workload = loader.get_workload(self.workload_name, self.device,
                                        **self.workload_params)
    # This workload's uiauto apk will not be installed -- automation will be
    # loaded directly from a path, so do not uninstall during teardown.
    self.workload.uninstall_uiauto_apk = False
    self.init_workload_resources(context)
    self.package = self.workload.package
def execute(self, args):
    self.logger.debug('Program arguments: {}'.format(vars(args)))
    if args.force:
        self.logger.info('Force-download of assets requested')
    if not args.url:
        self.logger.debug('URL not provided, falling back to default setting in config')
    self.logger.info('Downloading external assets from {}'.format(args.url))

    # Get the file index of available assets.
    ext_loader = ExtensionLoader(packages=settings.extension_packages,
                                 paths=settings.extension_paths)
    getter = ext_loader.get_resource_getter('http_assets', None, url=args.url,
                                            always_fetch=args.force)
    try:
        getter.index = getter.fetch_index()
    except (ConnectionError, RequestException) as e:
        self.exit_with_error(str(e))
    all_assets = dict()
    for k, v in getter.index.iteritems():
        all_assets[str(k)] = [str(asset['path']) for asset in v]

    # Here we get a list of all extensions present in the current WA installation,
    # and cross-check that against the list of extensions whose assets are requested.
    # The aim is to avoid downloading assets for extensions that do not exist, since
    # WA extensions and the asset index can be updated independently and go out of sync.
    all_extensions = [ext.name for ext in ext_loader.list_extensions()]
    assets_to_get = set(all_assets).intersection(all_extensions)
    if args.exts:
        assets_to_get = assets_to_get.intersection(args.exts)

    # Check that the resulting list is not empty.
    if not assets_to_get:
        if args.all:
            self.exit_with_error('Could not find extensions: {}'.format(', '.join(all_assets.keys())))
        else:  # args.exts
            self.exit_with_error('Asset index has no entries for: {}'.format(', '.join(args.exts)))

    # Warn about out-of-sync extensions, i.e. ones that do not exist in both WA and the asset index.
    missing = set(all_assets).difference(all_extensions) | set(args.exts or []).difference(all_assets)
    if missing:
        self.logger.warning('Not getting assets for missing extensions: {}'.format(', '.join(missing)))

    # Ideally the extension loader would be used to instantiate, but it does full
    # validation of the extension, like checking connected devices or supported
    # platform(s). This info might be unavailable and is not required to download
    # assets, since they are classified by extension name alone. So instead we use
    # a simple subclass of ``Extension`` providing a valid ``name`` attribute.
    for ext_name in assets_to_get:
        owner = _instantiate(NamedExtension, ext_name)
        self.logger.info('Getting assets for: {}'.format(ext_name))
        for asset in all_assets[ext_name]:
            getter.get(File(owner, asset))  # download the file
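# Illustrative sketch (not from the original source): the asset-selection
# bookkeeping above boils down to set algebra between the asset index, the
# installed extensions, and the user's requested extension names. All names
# below are made up for illustration.
all_assets = {'dhrystone': ['dhrystone.tar.gz'], 'vellamo': ['vellamo.apk']}
all_extensions = ['dhrystone', 'memcpy']    # installed in this WA
requested = ['dhrystone', 'vellamo']        # what the user asked for

assets_to_get = set(all_assets) & set(all_extensions) & set(requested)
missing = (set(all_assets) - set(all_extensions)) | (set(requested) - set(all_assets))

print(sorted(assets_to_get))   # ['dhrystone']  -> will be downloaded
print(sorted(missing))         # ['vellamo']    -> warned about and skipped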
def init_resources(self, context):
    # TODO: find a better APK to use for this.
    peacekeeper = ExtensionLoader().get_workload('peacekeeper', self.device)
    self.apk_file = context.resolver.get(wlauto.common.android.resources.ApkFile(peacekeeper),
                                         variant_name='chrome')
    self.package = ApkInfo(self.apk_file).package
def execute(self, args):  # pylint: disable=unpacking-non-sequence
    ext_loader = ExtensionLoader(packages=settings.extension_packages,
                                 paths=settings.extension_paths)
    extension = ext_loader.get_extension_class(args.name)
    out = StringIO()
    term_width, term_height = get_terminal_size()
    format_extension(extension, out, term_width)
    text = out.getvalue()
    pager = get_pager()
    if len(text.split('\n')) > term_height and pager:
        try:
            sp = subprocess.Popen(pager, stdin=subprocess.PIPE)
            sp.communicate(text)
        except OSError:
            self.logger.warning('Could not use PAGER "{}"'.format(pager))
            sys.stdout.write(text)
    else:
        sys.stdout.write(text)
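# Illustrative sketch (not from the original source): the pager hand-off above
# is a common pattern -- pipe long text into the user's $PAGER and fall back
# to stdout if that fails. A stripped-down, standard-library-only version; the
# get_pager() and get_terminal_size() helpers used in the original are WA
# utilities and are not reproduced here.
import os
import subprocess
import sys


def page_or_print(text):
    pager = os.environ.get('PAGER', 'less')
    try:
        proc = subprocess.Popen(pager, stdin=subprocess.PIPE)
        proc.communicate(text)
    except OSError:
        sys.stdout.write(text)


page_or_print('very long help text...\n' * 200)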
def __init__(self, device, **kwargs):
    super(AppShare, self).__init__(device, **kwargs)
    self.deployable_assets = [self.test_image]
    self.clean_assets = True
    loader = ExtensionLoader()

    # Initialise googlephotos
    args_googlephotos = dict(kwargs)
    del args_googlephotos['test_image']
    del args_googlephotos['email_recipient']
    del args_googlephotos['skype_login_name']
    del args_googlephotos['skype_login_pass']
    del args_googlephotos['skype_contact_name']
    args_googlephotos['markers_enabled'] = False
    self.wl_googlephotos = loader.get_workload('googlephotos', device, **args_googlephotos)
    self.view += self.wl_googlephotos.view
    self.package.append(self.wl_googlephotos.package)

    # Initialise gmail
    args_gmail = dict(kwargs)
    del args_gmail['test_image']
    args_gmail['recipient'] = args_gmail.pop('email_recipient')
    del args_gmail['skype_login_name']
    del args_gmail['skype_login_pass']
    del args_gmail['skype_contact_name']
    args_gmail['markers_enabled'] = False
    self.wl_gmail = loader.get_workload('gmail', device, **args_gmail)
    self.view += self.wl_gmail.view
    self.package.append(self.wl_gmail.package)

    # Initialise skype
    args_skype = dict(kwargs)
    del args_skype['test_image']
    del args_skype['email_recipient']
    args_skype['login_name'] = args_skype.pop('skype_login_name')
    args_skype['login_pass'] = args_skype.pop('skype_login_pass')
    args_skype['contact_name'] = args_skype.pop('skype_contact_name')
    args_skype['markers_enabled'] = False
    self.wl_skype = loader.get_workload('skype', device, **args_skype)
    self.view += self.wl_skype.view
    self.package.append(self.wl_skype.package)
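# Illustrative sketch (not from the original source): the constructor above
# repeatedly copies **kwargs and drops or renames the keys each sub-workload
# should not see. A small helper capturing that pattern; the helper name and
# the example keys are made up for illustration.
def split_kwargs(kwargs, drop=(), rename=None):
    out = dict(kwargs)
    for key in drop:
        out.pop(key, None)
    for old, new in (rename or {}).items():
        if old in out:
            out[new] = out.pop(old)
    return out


kwargs = {'test_image': 'img.png', 'email_recipient': 'a@b.com',
          'skype_login_name': 'user', 'skype_login_pass': 'pw',
          'skype_contact_name': 'friend'}
args_gmail = split_kwargs(kwargs,
                          drop=('test_image', 'skype_login_name',
                                'skype_login_pass', 'skype_contact_name'),
                          rename={'email_recipient': 'recipient'})
print(args_gmail)  # {'recipient': 'a@b.com'}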
def generate_extension_documentation(source_dir, outdir, ignore_paths):
    loader = ExtensionLoader(keep_going=True)
    loader.clear()
    loader.update(paths=[source_dir], ignore_paths=ignore_paths)
    for ext_type in loader.extension_kinds:
        if ext_type not in GENERATE_FOR:
            continue
        outfile = os.path.join(outdir, '{}s.rst'.format(ext_type))
        with open(outfile, 'w') as wfh:
            wfh.write('.. _{}s:\n\n'.format(ext_type))
            wfh.write(underline(capitalize('{}s'.format(ext_type))))
            exts = loader.list_extensions(ext_type)
            for ext in sorted(exts, key=lambda x: x.name):
                wfh.write(get_rst_from_extension(ext))
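# Illustrative invocation of generate_extension_documentation() above (not
# from the original source); the directory layout is made up. For each
# extension kind listed in GENERATE_FOR, this writes a '<kind>s.rst' file
# (for example 'workloads.rst') into outdir.
generate_extension_documentation(source_dir='wlauto',
                                 outdir='doc/source/extensions',
                                 ignore_paths=['wlauto/tests'])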