def fireperf_e2e_test(target_environment, plugin_repo_dir):
    """Run Firebase Performance end to end test."""

    _logger.info('Building fireperf plugin ...')
    with chdir(plugin_repo_dir):
        build_plugin_task = ':firebase-performance:perf-plugin:publishToMavenLocal'
        gradle.run(build_plugin_task, gradle.P('publishMode', 'SNAPSHOT'))

    version = _find_fireperf_plugin_version()
    _logger.info(
        f'Setting environment variable: FIREBASE_PERF_PLUGIN_VERSION={version} ...'
    )
    os.environ['FIREBASE_PERF_PLUGIN_VERSION'] = version

    fireperf_e2e_test_gradle_command = [
        '--build-cache', '--parallel', '--continue',
        ':firebase-perf:e2e-app:deviceCheck'
    ]
    if target_environment == 'autopush':
        fireperf_e2e_test_gradle_command += [
            gradle.P('fireperfBuildForAutopush', 'true')
        ]
    _logger.info(
        f'Running fireperf e2e test with target environment: {target_environment} ...'
    )
    gradle.run(*fireperf_e2e_test_gradle_command)
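The snippet above leans on two project-local helpers that are not shown: the gradle module (whose run and P functions it calls) and _find_fireperf_plugin_version. A minimal sketch of what the gradle module might look like, assuming it simply shells out to the Gradle wrapper; this is an illustration, not the real helper:

import subprocess


def P(name, value):
    """Formats a Gradle project property, e.g. P('publishMode', 'SNAPSHOT')
    returns '-PpublishMode=SNAPSHOT'."""
    return f'-P{name}={value}'


def run(*args):
    """Runs the Gradle wrapper from the current working directory and raises
    CalledProcessError if the build fails."""
    subprocess.run(['./gradlew', *args], check=True)

Likewise, _find_fireperf_plugin_version is undefined here. Since the plugin was just published with publishToMavenLocal, one plausible (hypothetical) implementation lists the plugin's versions in the local Maven repository; the Maven coordinates below are an assumption:

import os


def _find_fireperf_plugin_version():
    """Hypothetical: picks the newest plugin version found in mavenLocal."""
    plugin_dir = os.path.expanduser(
        '~/.m2/repository/com/google/firebase/perf-plugin')  # assumed coordinates
    versions = sorted(
        entry for entry in os.listdir(plugin_dir)
        if os.path.isdir(os.path.join(plugin_dir, entry)))
    if not versions:
        raise RuntimeError('fireperf plugin not found in mavenLocal')
    return versions[-1]  # lexicographic pick is good enough for a sketch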
Example #2

    async def _create_test_src(self):
        app_name = self.test_app_config['name']
        app_id = self.test_app_config['application-id']
        self.logger.info(
            f'Creating test app "{app_name}" with application-id "{app_id}"...'
        )

        mustache_context = {
            'application-id': app_id,
            'plugins': self.test_app_config.get('plugins', []),
            'dependencies': [
                {'key': x, 'version': self.artifact_versions[x]}
                for x in self.test_app_config.get('dependencies', [])
            ],
        }

        if app_name != 'baseline':
            mustache_context['plugins'].append(
                'com.google.gms.google-services')

        shutil.copytree('template', self.test_app_dir)
        with chdir(self.test_app_dir):
            renderer = pystache.Renderer()
            mustaches = glob.glob('**/*.mustache', recursive=True)
            for mustache in mustaches:
                result = renderer.render_path(mustache, mustache_context)
                original_name = mustache[:-9]  # TODO(yifany): mustache.removesuffix('.mustache')
                with open(original_name, 'w') as file:
                    file.write(result)
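Every example in this collection relies on a chdir(...) context manager that none of them defines. Python 3.11+ provides contextlib.chdir; on older interpreters an equivalent helper is a few lines, as sketched here:

import contextlib
import os


@contextlib.contextmanager
def chdir(directory):
    """Temporarily switches the working directory, restoring it on exit."""
    original = os.getcwd()
    os.chdir(directory)
    try:
        yield
    finally:
        os.chdir(original)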
Example #3
  async def _create_benchmark_projects(self):
    app_name = self.test_app_config['name']
    self.logger.info(f'Creating test app "{app_name}"...')

    mustache_context = await self._prepare_mustache_context()

    shutil.copytree('template', self.test_app_dir)
    with chdir(self.test_app_dir):
      renderer = pystache.Renderer()
      mustaches = glob.glob('**/*.mustache', recursive=True)
      for mustache in mustaches:
        result = renderer.render_path(mustache, mustache_context)
        original_name = mustache[:-9]  # TODO(yifany): mustache.removesuffix('.mustache')
        with open(original_name, 'w') as file:
          file.write(result)
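The _prepare_mustache_context coroutine awaited above is not part of this snippet. Judging from the inline dict in Example #2, it plausibly assembles the same application-id/plugins/dependencies mapping; a hypothetical sketch:

  async def _prepare_mustache_context(self):
    """Hypothetical: builds the template context from the test app config,
    mirroring the inline dict in Example #2."""
    return {
      'application-id': self.test_app_config['application-id'],
      'plugins': self.test_app_config.get('plugins', []),
      'dependencies': [
        {'key': dep, 'version': self.artifact_versions[dep]}
        for dep in self.test_app_config.get('dependencies', [])
      ],
    }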
Example #4
async def _launch_macrobenchmark_test():
  _logger.info('Starting macrobenchmark test...')

  artifact_versions, config, _, _ = await asyncio.gather(
    _parse_artifact_versions(),
    _parse_config_yaml(),
    _create_gradle_wrapper(),
    _copy_google_services(),
  )

  with chdir('macrobenchmark'):
    runners = [MacrobenchmarkTest(k, v, artifact_versions) for k, v in config.items()]
    results = await asyncio.gather(*[x.run() for x in runners], return_exceptions=True)

  exceptions = [x for x in results if isinstance(x, Exception)]
  if exceptions:
    _logger.error(f'Exceptions: {exceptions}')
    raise click.ClickException('Macrobenchmark test failed with above errors.')

  _logger.info('Macrobenchmark test finished.')
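Neither _parse_artifact_versions nor _parse_config_yaml is shown in these examples. Since config.items() is iterated as name/config pairs, the YAML step could be as small as the sketch below; the file name config.yaml and its schema are assumptions:

import yaml


async def _parse_config_yaml():
  """Hypothetical: loads the per-app benchmark configuration as a dict keyed
  by test app name."""
  with open('config.yaml') as config_file:  # assumed file name
    return yaml.safe_load(config_file)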
Example #5
async def _launch_macrobenchmark_test():
  _logger.info('Starting macrobenchmark test...')

  artifact_versions, config, _, _ = await asyncio.gather(
    _parse_artifact_versions(),
    _parse_config_yaml(),
    _create_gradle_wrapper(),
    _copy_google_services(),
  )

  _logger.info(f'Artifact versions: {artifact_versions}')

  with chdir('health-metrics/macrobenchmark'):
    runners = [MacrobenchmarkTest(k, v, artifact_versions) for k, v in config.items()]
    results = await asyncio.gather(*[x.run() for x in runners], return_exceptions=True)

  await _post_processing(results)

  _logger.info('Macrobenchmark test finished.')
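Example #5 factors the error handling that Example #4 does inline into _post_processing. A sketch consistent with that inline version, assuming the module-level _logger and the click import the surrounding script implies:

import logging

import click

_logger = logging.getLogger(__name__)


async def _post_processing(results):
  """Raises if any runner returned an exception, mirroring Example #4."""
  exceptions = [x for x in results if isinstance(x, Exception)]
  if exceptions:
    _logger.error(f'Exceptions: {exceptions}')
    raise click.ClickException('Macrobenchmark test failed with above errors.')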