def create_uiauto_project(path, name):
    """Instantiate the uiauto project template at *path* for workload *name*.

    Copies the template tree, then renders the manifest, gradle build file,
    build script and Java automation source for the derived package name.
    """
    package_name = 'com.arm.wlauto.uiauto.' + name.lower()
    shutil.copytree(os.path.join(TEMPLATES_DIR, 'uiauto_template'), path)

    manifest_dir = _d(os.path.join(path, 'app', 'src', 'main'))
    with open(os.path.join(manifest_dir, 'AndroidManifest.xml'), 'w') as wfh:
        wfh.write(render_template('uiauto_AndroidManifest.xml',
                                  {'package_name': package_name}))

    gradle_dir = _d(os.path.join(path, 'app'))
    with open(os.path.join(gradle_dir, 'build.gradle'), 'w') as wfh:
        wfh.write(render_template('uiauto_build.gradle',
                                  {'package_name': package_name}))

    build_script = os.path.join(path, 'build.sh')
    with open(build_script, 'w') as wfh:
        wfh.write(render_template('uiauto_build_script',
                                  {'package_name': package_name}))
    # rwx for owner, group and others so the script can be invoked directly.
    os.chmod(build_script, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)

    # Java source lives under the directory derived from the package name
    # minus its final component.
    package_dir = os.sep.join(package_name.split('.')[:-1])
    source_file = _f(os.path.join(path, 'app', 'src', 'main', 'java',
                                  package_dir, 'UiAutomation.java'))
    with open(source_file, 'w') as wfh:
        wfh.write(render_template('UiAutomation.java',
                                  {'name': name, 'package_name': package_name}))
def create_workload(name, kind='basic', where='local', check_name=True, **kwargs):
    """Create a new workload skeleton of the given *kind*.

    :param name: name for the new workload; must not clash with an existing
                 workload when ``check_name`` is set.
    :param kind: one of 'basic', 'uiauto', 'android', 'android_uiauto'.
    :param where: 'local' for the WA environment workloads directory, or an
                  explicit directory path.
    :param check_name: if True, verify no loaded workload already uses *name*.
    :raises CommandError: on a duplicate name or an unknown *kind*.
    """
    if check_name:
        extloader = ExtensionLoader(packages=settings.extension_packages,
                                    paths=settings.extension_paths)
        if name in [wl.name for wl in extloader.list_workloads()]:
            raise CommandError('Workload with name "{}" already exists.'.format(name))
    class_name = get_class_name(name)
    if where == 'local':
        workload_dir = _d(os.path.join(settings.environment_root,
                                       'workloads', name))
    else:
        workload_dir = _d(os.path.join(where, name))
    # Dispatch table keeps the supported kinds in one place.
    creators = {
        'basic': create_basic_workload,
        'uiauto': create_uiautomator_workload,
        'android': create_android_benchmark,
        'android_uiauto': create_android_uiauto_benchmark,
    }
    try:
        creator = creators[kind]
    except KeyError:
        raise CommandError('Unknown workload type: {}'.format(kind))
    creator(workload_dir, name, class_name, **kwargs)
    # print(...) with a single argument behaves identically on Python 2 and
    # Python 3; the original print statement is a syntax error on Python 3.
    print('Workload created in {}'.format(workload_dir))
def setup(self, context):
    """Create host-side output directories and reset device staging dirs."""
    def host_dirs(subdir):
        # One output directory per monitored path under the given subdir.
        return [_d(os.path.join(context.output_directory, subdir,
                                self._local_dir(d)))
                for d in self.paths]

    before_dirs = host_dirs('before')
    after_dirs = host_dirs('after')
    diff_dirs = host_dirs('diff')
    self.device_and_host_paths = zip(self.paths, before_dirs,
                                     after_dirs, diff_dirs)
    if self.use_tmpfs:
        for d in self.paths:
            relative = self.device.path.dirname(as_relative(d))
            for base in (self.on_device_before, self.on_device_after):
                staging = self.device.path.join(base, relative)
                # Recreate the staging directory from scratch so stale
                # captures from a previous run cannot leak in.
                if self.device.file_exists(staging):
                    self.device.execute('rm -rf {}'.format(staging),
                                        as_root=True)
                self.device.execute('mkdir -p {}'.format(staging),
                                    as_root=True)
def update_result(self, context):  # pylint: disable=R0914
    """Download DAQ data files and add per-port mean metrics and energy.

    For each downloaded CSV, registers it as an iteration artifact, computes
    the per-column means, and derives energy from the power column and the
    sampling rate. Results are also cached in ``self._results``.
    """
    self.logger.debug('Downloading data files.')
    output_directory = _d(os.path.join(context.output_directory, 'daq'))
    self._execute_command('get_data', output_directory=output_directory)
    for entry in os.listdir(output_directory):
        context.add_iteration_artifact('DAQ_{}'.format(os.path.splitext(entry)[0]),
                                       path=os.path.join('daq', entry),
                                       kind='data',
                                       description='DAQ power measurments.')
        port = os.path.splitext(entry)[0]
        path = os.path.join(output_directory, entry)
        key = (context.spec.id, context.workload.name,
               context.current_iteration)
        if key not in self._results:
            self._results[key] = {}
        with open(path) as fh:
            reader = csv.reader(fh)
            # next(reader) works on Python 2 and 3; reader.next() is 2-only.
            metrics = next(reader)
            # Transpose rows into per-metric columns of floats.
            data = [[float(v) for v in column]
                    for column in zip(*list(reader))]
            n = len(data[0])
            means = [sum(column) / n for column in data]
            for metric, value in zip(metrics, means):
                metric_name = '{}_{}'.format(port, metric)
                context.result.add_metric(metric_name, round(value, 3),
                                          UNITS[metric])
                self._results[key][metric_name] = round(value, 3)
            # Float literal avoids Python 2 integer division truncating the
            # scale factor to zero when sampling_rate is an int.
            energy = sum(data[metrics.index('power')]) * (self.sampling_rate / 1000000.0)
            context.result.add_metric('{}_energy'.format(port),
                                      round(energy, 3), UNITS['energy'])
def get(self, resource, **kwargs):
    """Return the path of a matching revent file, or None if absent."""
    # Expected name: '<device>.<stage>.revent', matched case-insensitively.
    wanted = '.'.join([resource.owner.device.name,
                       resource.stage, 'revent']).lower()
    location = _d(os.path.join(self.get_base_location(resource),
                               'revent_files'))
    for candidate in os.listdir(location):
        if candidate.lower() == wanted:
            return os.path.join(location, candidate)
def next_job(self, job):
    """Invoked by the runner when starting a new iteration of workload execution."""
    self.current_job = job
    self.job_iteration_counts[self.spec.id] += 1
    if self.aborted:
        return
    # Output directory is named '<label>_<spec id>_<iteration>'.
    parts = [str(p) for p in (self.spec.label, self.spec.id,
                              self.current_iteration)]
    self.output_directory = _d(os.path.join(self.run_output_directory,
                                            '_'.join(parts)))
    self.iteration_artifacts = list(self.workload.artifacts)
    self.current_job.result.iteration = self.current_iteration
    self.current_job.result.output_directory = self.output_directory
def create_uiautomator_workload(path, name, class_name):
    """Create a uiautomator-based workload skeleton under *path*."""
    create_uiauto_project(_d(os.path.join(path, 'uiauto')), name)
    init_py = os.path.join(path, '__init__.py')
    with open(init_py, 'w') as wfh:
        content = render_template('uiauto_workload',
                                  {'name': name, 'class_name': class_name})
        wfh.write(content)
def create_android_uiauto_benchmark(path, name, class_name):
    """Create an Android uiauto benchmark skeleton under *path*."""
    create_uiauto_project(_d(os.path.join(path, 'uiauto')), name)
    init_py = os.path.join(path, '__init__.py')
    with open(init_py, 'w') as wfh:
        content = render_template('android_uiauto_benchmark',
                                  {'name': name, 'class_name': class_name})
        wfh.write(content)
def get(self, resource, **kwargs):
    """Find a revent file, preferring the device model over the WA name."""
    device = resource.owner.device
    for name in (device.get_device_model(), device.name):
        if not name:
            continue
        # Expected name: '<name>.<stage>.revent', matched case-insensitively.
        wanted = '.'.join([name, resource.stage, 'revent']).lower()
        location = _d(os.path.join(self.get_base_location(resource),
                                   'revent_files'))
        for candidate in os.listdir(location):
            if candidate.lower() == wanted:
                return os.path.join(location, candidate)
def update_result(self, context):  # pylint: disable=R0914
    """Download DAQ data files and add per-port mean metrics and energy.

    Optionally merges channels first, then for each CSV: registers it as an
    artifact, optionally rewrites it through a temp file to filter negative
    samples, computes per-column means, and derives energy from the power
    column and the sampling rate.
    """
    self.logger.debug('Downloading data files.')
    output_directory = _d(os.path.join(context.output_directory, 'daq'))
    self._execute_command('get_data', output_directory=output_directory)
    if self.merge_channels:
        self._merge_channels(context)
    for entry in os.listdir(output_directory):
        context.add_iteration_artifact(
            'DAQ_{}'.format(os.path.splitext(entry)[0]),
            path=os.path.join('daq', entry),
            kind='data',
            description='DAQ power measurments.')
        port = os.path.splitext(entry)[0]
        path = os.path.join(output_directory, entry)
        key = (context.spec.id, context.spec.label,
               context.current_iteration)
        if key not in self._results:
            self._results[key] = {}
        temp_file = os.path.join(tempfile.gettempdir(), entry)
        writer, wfh = None, None
        with open(path) as fh:
            # When filtering negative samples, a cleaned copy is written to
            # a temp file and moved back over the original afterwards.
            if self.negative_samples != 'keep':
                wfh = open(temp_file, 'wb')
                writer = csv.writer(wfh)
            reader = csv.reader(fh)
            # NOTE(review): reader.next() is Python 2 only.
            metrics = reader.next()
            if writer:
                writer.writerow(metrics)
            self._metrics |= set(metrics)
            rows = _get_rows(reader, writer, self.negative_samples)
            # Transpose rows into per-metric columns.
            data = zip(*rows)
            if writer:
                wfh.close()
                shutil.move(temp_file, os.path.join(output_directory, entry))
            n = len(data[0])
            means = [s / n for s in map(sum, data)]
            for metric, value in zip(metrics, means):
                metric_name = '{}_{}'.format(port, metric)
                context.result.add_metric(metric_name, round(value, 3),
                                          UNITS[metric])
                self._results[key][metric_name] = round(value, 3)
            # NOTE(review): if sampling_rate is an int this is Python 2
            # integer division and truncates to zero -- confirm its type.
            energy = sum(data[metrics.index('power')]) * (
                self.sampling_rate / 1000000)
            context.result.add_metric('{}_energy'.format(port),
                                      round(energy, 3), UNITS['energy'])
def setup(self, context):
    """Create host-side output directories and reset device staging dirs.

    Builds before/after/diff output directories on the host for each
    monitored device path and, when tmpfs staging is enabled, recreates the
    corresponding on-device before/after staging directories from scratch.
    """
    before_dirs = [
        _d(os.path.join(context.output_directory, 'before',
                        self._local_dir(d)))
        for d in self.paths
    ]
    after_dirs = [
        _d(os.path.join(context.output_directory, 'after',
                        self._local_dir(d)))
        for d in self.paths
    ]
    diff_dirs = [
        _d(os.path.join(context.output_directory, 'diff',
                        self._local_dir(d)))
        for d in self.paths
    ]
    # One (device_path, before, after, diff) tuple per monitored path.
    self.device_and_host_paths = zip(self.paths, before_dirs, after_dirs,
                                     diff_dirs)
    if self.use_tmpfs:
        for d in self.paths:
            before_dir = self.device.path.join(
                self.on_device_before,
                self.device.path.dirname(as_relative(d)))
            after_dir = self.device.path.join(
                self.on_device_after,
                self.device.path.dirname(as_relative(d)))
            # Recreate staging dirs so stale captures cannot leak in.
            if self.device.file_exists(before_dir):
                self.device.execute('rm -rf {}'.format(before_dir),
                                    as_root=True)
            self.device.execute('mkdir -p {}'.format(before_dir),
                                as_root=True)
            if self.device.file_exists(after_dir):
                self.device.execute('rm -rf {}'.format(after_dir),
                                    as_root=True)
            self.device.execute('mkdir -p {}'.format(after_dir),
                                as_root=True)
def create_uiauto_project(path, name):
    """Copy the uiauto project template to *path* and render its files.

    Renders the Android manifest, gradle build file, build script and the
    Java UiAutomation source for the package name derived from *name*.
    """
    package_name = 'com.arm.wlauto.uiauto.' + name.lower()
    shutil.copytree(os.path.join(TEMPLATES_DIR, 'uiauto_template'), path)
    manifest_path = os.path.join(path, 'app', 'src', 'main')
    # NOTE(review): 'mainifest' is a typo for 'manifest' (local name only).
    mainifest = os.path.join(_d(manifest_path), 'AndroidManifest.xml')
    with open(mainifest, 'w') as wfh:
        wfh.write(
            render_template('uiauto_AndroidManifest.xml',
                            {'package_name': package_name}))
    build_gradle_path = os.path.join(path, 'app')
    build_gradle = os.path.join(_d(build_gradle_path), 'build.gradle')
    with open(build_gradle, 'w') as wfh:
        wfh.write(
            render_template('uiauto_build.gradle',
                            {'package_name': package_name}))
    build_script = os.path.join(path, 'build.sh')
    with open(build_script, 'w') as wfh:
        wfh.write(
            render_template('uiauto_build_script',
                            {'package_name': package_name}))
    # Make build.sh executable (rwx) for owner, group and others.
    os.chmod(build_script, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
    # Java source goes under the directory derived from the package name
    # minus its final component.
    source_file = _f(
        os.path.join(path, 'app', 'src', 'main', 'java',
                     os.sep.join(package_name.split('.')[:-1]),
                     'UiAutomation.java'))
    with open(source_file, 'w') as wfh:
        wfh.write(
            render_template('UiAutomation.java', {
                'name': name,
                'package_name': package_name
            }))
def create_workload(name, kind='basic', where='local', check_name=True, **kwargs):
    """Create a new workload skeleton of the given *kind*.

    :param name: name for the new workload; must not clash with an existing
                 workload when ``check_name`` is set.
    :param kind: one of 'basic', 'uiauto', 'android', 'android_uiauto'.
    :param where: 'local' for the WA environment workloads directory, or an
                  explicit directory path.
    :param check_name: if True, verify no loaded workload already uses *name*.
    :raises CommandError: on a duplicate name or an unknown *kind*.
    """
    if check_name:
        extloader = ExtensionLoader(packages=settings.extension_packages,
                                    paths=settings.extension_paths)
        if name in [wl.name for wl in extloader.list_workloads()]:
            raise CommandError('Workload with name "{}" already exists.'.format(name))
    class_name = get_class_name(name)
    if where == 'local':
        workload_dir = _d(os.path.join(settings.environment_root,
                                       'workloads', name))
    else:
        workload_dir = _d(os.path.join(where, name))
    if kind == 'basic':
        create_basic_workload(workload_dir, name, class_name, **kwargs)
    elif kind == 'uiauto':
        create_uiautomator_workload(workload_dir, name, class_name, **kwargs)
    elif kind == 'android':
        create_android_benchmark(workload_dir, name, class_name, **kwargs)
    elif kind == 'android_uiauto':
        create_android_uiauto_benchmark(workload_dir, name, class_name, **kwargs)
    else:
        raise CommandError('Unknown workload type: {}'.format(kind))
    # print(...) with a single argument behaves identically on Python 2 and
    # Python 3; the original print statement is a syntax error on Python 3.
    print('Workload created in {}'.format(workload_dir))
def update_result(self, context):  # pylint: disable=R0914
    """Download DAQ data files and add per-port mean metrics and energy.

    Optionally merges channels first, then for each CSV: registers it as an
    artifact, optionally rewrites it through a temp file to filter negative
    samples, computes per-column means, and derives energy from the power
    column and the sampling rate. Results are cached in ``self._results``.
    """
    self.logger.debug('Downloading data files.')
    output_directory = _d(os.path.join(context.output_directory, 'daq'))
    self._execute_command('get_data', output_directory=output_directory)
    if self.merge_channels:
        self._merge_channels(context)
    for entry in os.listdir(output_directory):
        context.add_iteration_artifact('DAQ_{}'.format(os.path.splitext(entry)[0]),
                                       path=os.path.join('daq', entry),
                                       kind='data',
                                       description='DAQ power measurments.')
        port = os.path.splitext(entry)[0]
        path = os.path.join(output_directory, entry)
        key = (context.spec.id, context.spec.label,
               context.current_iteration)
        if key not in self._results:
            self._results[key] = {}
        temp_file = os.path.join(tempfile.gettempdir(), entry)
        writer, wfh = None, None
        with open(path) as fh:
            # When filtering negative samples, a cleaned copy is written to
            # a temp file and moved back over the original afterwards.
            if self.negative_samples != 'keep':
                wfh = open(temp_file, 'wb')
                writer = csv.writer(wfh)
            try:
                reader = csv.reader(fh)
                # next(reader) works on Python 2 and 3; reader.next() is 2-only.
                metrics = next(reader)
                if writer:
                    writer.writerow(metrics)
                self._metrics |= set(metrics)
                rows = _get_rows(reader, writer, self.negative_samples)
                # Transpose rows into per-metric columns.
                data = zip(*rows)
            finally:
                # Original leaked the temp-file handle if parsing raised.
                if wfh:
                    wfh.close()
            if writer:
                shutil.move(temp_file, os.path.join(output_directory, entry))
            n = len(data[0])
            means = [s / n for s in map(sum, data)]
            for metric, value in zip(metrics, means):
                metric_name = '{}_{}'.format(port, metric)
                context.result.add_metric(metric_name, round(value, 3),
                                          UNITS[metric])
                self._results[key][metric_name] = round(value, 3)
            # Float literal avoids Python 2 integer division truncating the
            # scale factor to zero when sampling_rate is an int.
            energy = sum(data[metrics.index('power')]) * (
                self.sampling_rate / 1000000.0)
            context.result.add_metric('{}_energy'.format(port),
                                      round(energy, 3), UNITS['energy'])
def get(self, resource, **kwargs):
    """Return the path of a validated revent file, or None.

    Tries the device model name first, then the WA device name. Candidates
    failing ReventParser validation are logged and skipped.
    """
    device_model = resource.owner.device.get_device_model()
    wa_device_name = resource.owner.device.name
    for name in [device_model, wa_device_name]:
        if not name:
            continue
        # Expected name: '<name>.<stage>.revent', matched case-insensitively.
        filename = ".".join([name, resource.stage, "revent"]).lower()
        location = _d(os.path.join(self.get_base_location(resource),
                                   "revent_files"))
        for candidate in os.listdir(location):
            if candidate.lower() == filename.lower():
                path = os.path.join(location, candidate)
                try:
                    ReventParser.check_revent_file(path)
                    return path
                except ValueError as e:
                    # str(e) is portable; BaseException.message is a
                    # deprecated Python-2-only attribute removed in Python 3.
                    self.logger.warning(str(e))
def get(self, resource, **kwargs):
    """Return the path of a valid revent recording, or None.

    Tries the device model name first, then the WA device name; candidates
    that fail to parse are logged and skipped.
    """
    device = resource.owner.device
    for name in (device.get_device_model(), device.name):
        if not name:
            continue
        wanted = '.'.join([name, resource.stage, 'revent']).lower()
        location = _d(os.path.join(self.get_base_location(resource),
                                   'revent_files'))
        for candidate in os.listdir(location):
            if candidate.lower() != wanted:
                continue
            path = os.path.join(location, candidate)
            try:
                ReventRecording(path).close()  # Check valid recording
                return path
            except ValueError as e:
                self.logger.warning(e.message)
def try_get_resource(self, resource, version, remote_path, local_path):
    """Resolve a resource from the local cache or the remote location.

    Remote hits are copied into the local cache and the cached path is
    returned; returns None (or a falsy remote result) when nothing is found.
    """
    if not self.always_fetch:
        cached = self.get_from(resource, version, local_path)
        if cached:
            return cached
    if not remote_path:  # remote path is not set: nowhere else to look
        return None
    # Didn't find it cached locally; now check the remote.
    remote_result = self.get_from(resource, version, remote_path)
    if not remote_result:
        return remote_result
    # Found it remotely: cache a copy locally and return the cached path.
    cached_copy = os.path.join(_d(local_path), os.path.basename(remote_result))
    self.logger.debug('cp {} {}'.format(remote_result, cached_copy))
    shutil.copy(remote_result, cached_copy)
    return cached_copy
def get(self, resource, **kwargs):
    """Return the path of a valid revent recording, or None.

    Tries the device model name first, then the WA device name; candidates
    that fail to parse are logged and skipped.
    """
    device_model = resource.owner.device.get_device_model()
    wa_device_name = resource.owner.device.name
    for name in [device_model, wa_device_name]:
        if not name:
            continue
        # Expected name: '<name>.<stage>.revent', matched case-insensitively.
        filename = '.'.join([name, resource.stage, 'revent']).lower()
        location = _d(
            os.path.join(self.get_base_location(resource), 'revent_files'))
        for candidate in os.listdir(location):
            if candidate.lower() == filename.lower():
                path = os.path.join(location, candidate)
                try:
                    ReventRecording(path).close()  # Check valid recording
                    return path
                except ValueError as e:
                    # NOTE(review): e.message is Python 2 only; use str(e)
                    # when porting to Python 3.
                    self.logger.warning(e.message)
def _merge_channels(self, context):  # pylint: disable=r0914
    """Sum per-label DAQ CSVs for each merged channel into a single CSV.

    For every entry in ``self.label_map``, reads each source label's CSV
    from the 'daq' output directory, sums the data rows element-wise, and
    writes the result to '<name>.csv'. The header row is taken from the
    last source file read.
    """
    output_directory = _d(os.path.join(context.output_directory, 'daq'))
    # items() is portable; dict.iteritems() does not exist on Python 3.
    for name, labels in self.label_map.items():
        if not labels:
            # Original raised NameError/TypeError on an empty label list;
            # there is nothing to merge, so skip the channel instead.
            continue
        summed = None
        metrics = None
        for label in labels:
            path = os.path.join(output_directory, "{}.csv".format(label))
            with open(path) as fh:
                reader = csv.reader(fh)
                # next(reader) works on Python 2 and 3; reader.next() is 2-only.
                metrics = next(reader)
                rows = _get_rows(reader, None, self.negative_samples)
                # 'is not None' so an empty first file is not silently
                # replaced by the next file's rows.
                if summed is not None:
                    summed = [[x + y for x, y in zip(a, b)]
                              for a, b in zip(rows, summed)]
                else:
                    summed = rows
        output_path = os.path.join(output_directory, "{}.csv".format(name))
        with open(output_path, 'wb') as wfh:
            writer = csv.writer(wfh)
            writer.writerow(metrics)
            for row in summed:
                writer.writerow(row)
def create_uiautomator_workload(path, name, class_name):
    """Generate a uiauto workload: the Java project plus the Python stub."""
    uiauto_path = _d(os.path.join(path, 'uiauto'))
    create_uiauto_project(uiauto_path, name)
    params = {'name': name, 'class_name': class_name}
    with open(os.path.join(path, '__init__.py'), 'w') as wfh:
        wfh.write(render_template('uiauto_workload', params))
def create_android_uiauto_benchmark(path, name, class_name):
    """Generate an Android uiauto benchmark: Java project plus Python stub."""
    uiauto_path = _d(os.path.join(path, 'uiauto'))
    create_uiauto_project(uiauto_path, name)
    params = {'name': name, 'class_name': class_name}
    with open(os.path.join(path, '__init__.py'), 'w') as wfh:
        wfh.write(render_template('android_uiauto_benchmark', params))
def dependencies_directory(self):
    """Path to this extension's dependencies directory.

    NOTE(review): _d presumably ensures the directory exists -- confirm
    against its definition.
    """
    return _d(os.path.join(settings.dependencies_directory, self.name))
def init_resources(self, context):
    """Locate the video dependencies directory and index available files."""
    video_dir = os.path.join(settings.dependencies_directory, 'video')
    self.video_directory = _d(video_dir)
    self.video_files = defaultdict(list)
    self.enum_video_files()
    self._selected_file = None
def init_resources(self, context):
    """Locate the video dependencies directory and index available files."""
    self.video_directory = _d(os.path.join(settings.dependencies_directory,
                                           'video'))
    # Populated by enum_video_files(); keys group the discovered files --
    # confirm key semantics against that method.
    self.video_files = defaultdict(list)
    self.enum_video_files()
    self._selected_file = None
def get(self, resource, **kwargs):
    """Return the path of a matching revent file, or None if absent."""
    # Expected name: '<device>.<stage>.revent', matched case-insensitively.
    filename = '.'.join([resource.owner.device.name, resource.stage,
                         'revent']).lower()
    location = _d(os.path.join(self.get_base_location(resource),
                               'revent_files'))
    for candidate in os.listdir(location):
        if candidate.lower() == filename.lower():
            return os.path.join(location, candidate)