Example #1
def create_docker_jobspec(name, dockerfile_dir, shell_command, environ={},
                          flake_retries=0, timeout_retries=0):
  """Creates jobspec for a task running under docker."""
  environ = environ.copy()
  environ['RUN_COMMAND'] = shell_command

  docker_args = []
  for k, v in environ.items():
    docker_args += ['-e', '%s=%s' % (k, v)]
  docker_env = {'DOCKERFILE_DIR': dockerfile_dir,
                'DOCKER_RUN_SCRIPT': 'tools/jenkins/docker_run.sh',
                'OUTPUT_DIR': 'artifacts'}
  jobspec = jobset.JobSpec(
          cmdline=['tools/jenkins/build_and_run_docker.sh'] + docker_args,
          environ=docker_env,
          shortname='build_artifact.%s' % (name),
          timeout_seconds=30*60,
          flake_retries=flake_retries,
          timeout_retries=timeout_retries)
  return jobspec
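A minimal sketch of how such a spec might be executed, mirroring the jobset.run calls in Examples #9 and #17; the artifact name, dockerfile directory, and command here are hypothetical:

job = create_docker_jobspec(
    'protoc_artifact',                              # hypothetical artifact name
    'tools/dockerfile/grpc_artifact_protoc',        # hypothetical dockerfile dir
    'tools/run_tests/artifacts/build_artifact.sh',  # hypothetical command
    environ={'CONFIG': 'opt'})
num_failures, _ = jobset.run([job], newline_on_success=True, maxjobs=1)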
Example #2
def server_jobspec(language, docker_image):
  """Create jobspec for running a server"""
  container_name = dockerjob.random_name('interop_server_%s' % language.safename)
  cmdline = bash_login_cmdline(
      language.server_cmd(['--port=%s' % _DEFAULT_SERVER_PORT]))
  environ = language.global_env()
  docker_cmdline = docker_run_cmdline(cmdline,
                                      image=docker_image,
                                      cwd=language.server_cwd,
                                      environ=environ,
                                      docker_args=['-p', str(_DEFAULT_SERVER_PORT),
                                                   '--name', container_name])

  server_job = jobset.JobSpec(
          cmdline=docker_cmdline,
          environ=environ,
          shortname='interop_server_%s' % language,
          timeout_seconds=30*60)
  server_job.container_name = container_name
  return server_job
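A hedged usage sketch: the container name attached above lets the caller stop the server's docker container once the client jobs finish. The language lookup table and image tag below are assumptions:

server_lang = _LANGUAGES['go']                    # hypothetical lookup table
spec = server_jobspec(server_lang, 'grpc_interop_go:some_tag')  # hypothetical tag
# ... start the job, run clients, then use spec.container_name for cleanup ...
print(spec.container_name)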
Example #3
def create_scenario_jobspec(scenario_json, workers, remote_host=None,
                            bq_result_table=None):
  """Runs one scenario using QPS driver."""
  # setting QPS_WORKERS env variable here makes sure it works with SSH too.
  cmd = 'QPS_WORKERS="%s" ' % ','.join(workers)
  if bq_result_table:
    cmd += 'BQ_RESULT_TABLE="%s" ' % bq_result_table
  cmd += 'tools/run_tests/performance/run_qps_driver.sh '
  cmd += '--scenarios_json=%s ' % pipes.quote(json.dumps({'scenarios': [scenario_json]}))
  cmd += '--scenario_result_file=scenario_result.json'
  if remote_host:
    user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
    # The shell concatenates the double-quoted prefix with the quoted command
    # into a single remote command argument for ssh.
    cmd = 'ssh %s "cd ~/performance_workspace/grpc/ && "%s' % (user_at_host, pipes.quote(cmd))

  return jobset.JobSpec(
      cmdline=[cmd],
      shortname='qps_json_driver.%s' % scenario_json['name'],
      timeout_seconds=3*60,
      shell=True,
      verbose_success=True)
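A sketch of how these specs are typically batched; the scenario dict and worker addresses are invented for illustration, and maxjobs=1 keeps scenarios from contending for the same workers:

scenarios = [{'name': 'cpp_generic_async_streaming_ping_pong'}]  # hypothetical
workers = ['localhost:10000', 'localhost:10010']                 # hypothetical
scenario_jobs = [create_scenario_jobspec(s, workers) for s in scenarios]
num_failures, _ = jobset.run(scenario_jobs, newline_on_success=True, maxjobs=1)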
Example #4
def create_qpsworker_job(language, shortname=None,
                         port=10000, remote_host=None):
  # TODO: support more languages
  cmdline = language.worker_cmdline() + ['--driver_port=%s' % port]
  if remote_host:
    user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
    cmdline = ['ssh',
               str(user_at_host),
               'cd ~/performance_workspace/grpc/ && %s' % ' '.join(cmdline)]
    host_and_port = '%s:%s' % (remote_host, port)
  else:
    host_and_port = 'localhost:%s' % port

  # TODO(jtattermusch): with some care, we can calculate the right timeout
  # of a worker from the sum of warmup + benchmark times for all the scenarios
  jobspec = jobset.JobSpec(
      cmdline=cmdline,
      shortname=shortname,
      timeout_seconds=15*60)
  return QpsWorkerJob(jobspec, language, host_and_port)
Example #5
    def job_spec(self, cmdline, hash_targets, shortname=None, environ={}):
        """Construct a jobset.JobSpec for a test under this config

        Args:
          cmdline:      a list of strings specifying the command line the test
                        would like to run
          hash_targets: either None (don't do caching of test results), or
                        a list of strings specifying files to include in a
                        binary hash to check if a test has changed
                        -- if used, all artifacts needed to run the test must
                           be listed
        """
        actual_environ = self.environ.copy()
        for k, v in environ.items():
            actual_environ[k] = v
        return jobset.JobSpec(
            cmdline=cmdline,
            shortname=shortname,
            environ=actual_environ,
            hash_targets=hash_targets if self.allow_hashing else None)
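To illustrate the merge semantics above: the config's base environ is copied first, so per-call entries win on key collisions. A hedged sketch, assuming `config` is an instance of this class:

spec = config.job_spec(['bins/opt/end2end_test'],      # hypothetical test binary
                       hash_targets=None,
                       environ={'GRPC_TRACE': 'all'})  # overrides config's value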
Example #6
def cloud_to_cloud_jobspec(language,
                           test_cases,
                           server_name,  # used in the job's shortname below
                           server_addresses,
                           test_duration_secs,
                           num_channels_per_server,
                           num_stubs_per_channel,
                           metrics_port,
                           docker_image=None):
  """Creates jobspec for cloud-to-cloud interop test"""
  cmdline = bash_login_cmdline(language.client_cmd([
      '--test_cases=%s' % test_cases, '--server_addresses=%s' %
      server_addresses, '--test_duration_secs=%s' % test_duration_secs,
      '--num_stubs_per_channel=%s' % num_stubs_per_channel,
      '--num_channels_per_server=%s' % num_channels_per_server,
      '--metrics_port=%s' % metrics_port
  ]))
  print(cmdline)
  cwd = language.client_cwd
  environ = language.global_env()
  container_name = None
  if docker_image:
    container_name = dockerjob.random_name('interop_client_%s' %
                                           language.safename)
    cmdline = docker_run_cmdline(
        cmdline,
        image=docker_image,
        environ=environ,
        cwd=cwd,
        docker_args=['--net=host', '--name', container_name])
    cwd = None

  test_job = jobset.JobSpec(cmdline=cmdline,
                            cwd=cwd,
                            environ=environ,
                            shortname='cloud_to_cloud:%s:%s_server:stress_test' % (
                                language, server_name),
                            timeout_seconds=test_duration_secs * 2,
                            flake_retries=0,
                            timeout_retries=0,
                            kill_handler=_job_kill_handler)
  test_job.container_name = container_name
  return test_job
Example #7
def cloud_to_prod_jobspec(language, test_case, docker_image=None, auth=False):
    """Creates jobspec for cloud-to-prod interop test"""
    container_name = None
    cmdargs = [
        '--server_host_override=grpc-test.sandbox.google.com',
        '--server_host=grpc-test.sandbox.google.com', '--server_port=443',
        '--use_tls=true',
        '--test_case=%s' % test_case
    ]
    environ = dict(language.cloud_to_prod_env(), **language.global_env())
    if auth:
        auth_cmdargs, auth_env = auth_options(language, test_case)
        cmdargs += auth_cmdargs
        environ.update(auth_env)
    cmdline = bash_login_cmdline(language.client_cmd(cmdargs))
    cwd = language.client_cwd

    if docker_image:
        container_name = dockerjob.random_name('interop_client_%s' %
                                               language.safename)
        cmdline = docker_run_cmdline(
            cmdline,
            image=docker_image,
            cwd=cwd,
            environ=environ,
            docker_args=['--net=host', '--name', container_name])
        cwd = None
        environ = None

    suite_name = 'cloud_to_prod_auth' if auth else 'cloud_to_prod'
    test_job = jobset.JobSpec(cmdline=cmdline,
                              cwd=cwd,
                              environ=environ,
                              shortname='%s:%s:%s' %
                              (suite_name, language, test_case),
                              timeout_seconds=90,
                              flake_retries=5 if args.allow_flakes else 0,
                              timeout_retries=2 if args.allow_flakes else 0,
                              kill_handler=_job_kill_handler)
    test_job.container_name = container_name
    return test_job
Example #8
def _generate_test_case_jobspecs(lang, runtime, release, suite_name):
    """Returns the list of test cases from testcase files per lang/release."""
    testcase_lines = _read_test_cases_file(lang, runtime, release)

    job_spec_list = []
    for line in testcase_lines:
        # TODO(jtattermusch): revisit the logic for updating test case commands
        # as what is currently being done seems fragile.

        # Extract test case name from the command line
        m = re.search(r'--test_case=(\w+)', line)
        testcase_name = m.group(1) if m else 'unknown_test'

        # Extract the server name from the command line
        if '--server_host_override=' in line:
            m = re.search(
                r'--server_host_override=((.*).sandbox.googleapis.com)', line)
        else:
            m = re.search(r'--server_host=((.*).sandbox.googleapis.com)', line)
        server = m.group(1) if m else 'unknown_server'
        server_short = m.group(2) if m else 'unknown_server'

        # replace original server_host argument
        assert '--server_host=' in line
        line = re.sub(r'--server_host=[^ ]*',
                      r'--server_host=%s' % args.server_host, line)

        # some interop tests don't set server_host_override (see #17407),
        # but we need to use it if a different host is set via cmdline args.
        if args.server_host != server and '--server_host_override=' not in line:
            line = re.sub(r'(--server_host=[^ ]*)',
                          r'\1 --server_host_override=%s' % server, line)

        spec = jobset.JobSpec(cmdline=line,
                              shortname='%s:%s:%s:%s' %
                              (suite_name, lang, server_short, testcase_name),
                              timeout_seconds=_TEST_TIMEOUT_SECONDS,
                              shell=True,
                              flake_retries=5 if args.allow_flakes else 0)
        job_spec_list.append(spec)
    return job_spec_list
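A worked trace of the rewriting logic above, using the same regexes on a hypothetical input line:

import re
line = '--server_host=foo.sandbox.googleapis.com --test_case=large_unary'
line = re.sub(r'--server_host=[^ ]*', '--server_host=localhost', line)
if '--server_host_override=' not in line:
    line = re.sub(r'(--server_host=[^ ]*)',
                  r'\1 --server_host_override=foo.sandbox.googleapis.com', line)
print(line)
# --server_host=localhost --server_host_override=foo.sandbox.googleapis.com --test_case=large_unary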
Example #9
def archive_repo():
    """Archives local version of repo including submodules."""
    # TODO: also archive grpc-go and grpc-java repos
    archive_job = jobset.JobSpec(
        cmdline=['tar', '-cf', '../grpc.tar', '../grpc/'],
        shortname='archive_repo',
        timeout_seconds=3 * 60)

    jobset.message('START', 'Archiving local repository.', do_newline=True)
    num_failures, _ = jobset.run([archive_job],
                                 newline_on_success=True,
                                 maxjobs=1)
    if num_failures == 0:
        jobset.message('SUCCESS',
                       'Archive of local repository created successfully.',
                       do_newline=True)
    else:
        jobset.message('FAILED',
                       'Failed to archive local repository.',
                       do_newline=True)
        sys.exit(1)
Example #10
def build_interop_image_jobspec(language, tag=None):
  """Creates jobspec for building interop docker image for a language"""
  if not tag:
    tag = 'grpc_interop_%s:%s' % (language.safename, uuid.uuid4())
  env = {'INTEROP_IMAGE': tag,
         'BASE_NAME': 'grpc_interop_%s' % language.safename}
  if not args.travis:
    env['TTY_FLAG'] = '-t'
  # This env variable is used to get around the github rate limit
  # error when running the PHP `composer install` command
  # TODO(stanleycheung): find a more elegant way to do this
  if language.safename == 'php' and os.path.exists('/var/local/.composer/auth.json'):
    env['BUILD_INTEROP_DOCKER_EXTRA_ARGS'] = \
      '-v /var/local/.composer/auth.json:/root/.composer/auth.json:ro'
  build_job = jobset.JobSpec(
          cmdline=['tools/jenkins/build_interop_image.sh'],
          environ=env,
          shortname='build_docker_%s' % (language),
          timeout_seconds=30*60)
  build_job.tag = tag
  return build_job
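A hedged sketch of the build-then-consume flow: the generated `tag` attribute is what later jobspecs use as their docker image.

build_job = build_interop_image_jobspec(language)  # 'language' as above
num_failures, _ = jobset.run([build_job], newline_on_success=True, maxjobs=1)
if num_failures == 0:
    docker_image = build_job.tag  # fed to server/client jobspecs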
Example #11
def create_qpsworker_job(language,
                         shortname=None,
                         port=10000,
                         remote_host=None):
    # TODO: support more languages
    cmdline = language.worker_cmdline() + ['--driver_port=%s' % port]
    if remote_host:
        user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
        cmdline = [
            'ssh',
            str(user_at_host),
            'cd ~/performance_workspace/grpc/ && %s' % ' '.join(cmdline)
        ]
        host_and_port = '%s:%s' % (remote_host, port)
    else:
        host_and_port = 'localhost:%s' % port

    jobspec = jobset.JobSpec(cmdline=cmdline,
                             shortname=shortname,
                             timeout_seconds=15 * 60)
    return QpsWorkerJob(jobspec, language, host_and_port)
Example #12
def build_image_jobspec(runtime, env, gcr_tag, stack_base):
    """Build interop docker image for a language with runtime.

  runtime: a <lang><version> string, for example go1.8.
  env:     dictionary of env to passed to the build script.
  gcr_tag: the tag for the docker image (i.e. v1.3.0).
  stack_base: the local gRPC repo path.
  """
    basename = 'grpc_interop_%s' % runtime
    tag = '%s/%s:%s' % (args.gcr_path, basename, gcr_tag)
    build_env = {'INTEROP_IMAGE': tag, 'BASE_NAME': basename, 'TTY_FLAG': '-t'}
    build_env.update(env)
    image_builder_path = _IMAGE_BUILDER
    # 'lang' is resolved from the enclosing scope in the original script.
    if client_matrix.should_build_docker_interop_image_from_release_tag(lang):
        image_builder_path = os.path.join(stack_base, _IMAGE_BUILDER)
    build_job = jobset.JobSpec(cmdline=[image_builder_path],
                               environ=build_env,
                               shortname='build_docker_%s' % runtime,
                               timeout_seconds=30 * 60)
    build_job.tag = tag
    return build_job
Example #13
def _collect_bm_data(bm, cfg, name, reps, idx, loops):
  jobs_list = []
  for line in subprocess.check_output(
      ['bm_diff_%s/%s/%s' % (name, cfg, bm),
       '--benchmark_list_tests']).splitlines():
    stripped_line = line.strip().replace("/", "_").replace(
        "<", "_").replace(">", "_").replace(", ", "_")
    cmd = [
        'bm_diff_%s/%s/%s' % (name, cfg, bm),
        '--benchmark_filter=^%s$' % line,
        '--benchmark_out=%s.%s.%s.%s.%d.json' % (bm, stripped_line, cfg, name,
                                                 idx),
        '--benchmark_out_format=json',
        '--benchmark_repetitions=%d' % reps,
    ]
    jobs_list.append(
        jobset.JobSpec(
            cmd,
            shortname='%s %s %s %s %d/%d' % (bm, line, cfg, name, idx + 1,
                                             loops),
            verbose_success=True,
            timeout_seconds=60 * 2))
  return jobs_list
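A hedged sketch of the loop this helper feeds (benchmark and config names are hypothetical); maxjobs=1 keeps measurements from interfering with each other:

loops = 3  # hypothetical repetition count
bm_jobs = []
for idx in range(loops):
    bm_jobs += _collect_bm_data('bm_fullstack_unary_ping_pong', 'opt', 'new',
                                reps=5, idx=idx, loops=loops)
jobset.run(bm_jobs, maxjobs=1)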
Example #14
def build_interop_image_jobspec(language, tag=None):
  """Creates jobspec for building interop docker image for a language"""
  if not tag:
    tag = 'grpc_interop_%s:%s' % (language.safename, uuid.uuid4())
  env = {'INTEROP_IMAGE': tag,
         'BASE_NAME': 'grpc_interop_%s' % language.safename}
  if not args.travis:
    env['TTY_FLAG'] = '-t'
  # This env variable is used to get around the github rate limit
  # error when running the PHP `composer install` command
  host_file = '%s/.composer/auth.json' % os.environ['HOME']
  if language.safename == 'php' and os.path.exists(host_file):
    env['BUILD_INTEROP_DOCKER_EXTRA_ARGS'] = \
      '-v %s:/root/.composer/auth.json:ro' % host_file
  build_job = jobset.JobSpec(
          cmdline=['tools/run_tests/dockerize/build_interop_image.sh'],
          environ=env,
          shortname='build_docker_%s' % (language),
          timeout_seconds=30*60)
  build_job.tag = tag
  return build_job
Example #15
def cloud_to_cloud_jobspec(language,
                           test_case,
                           server_name,
                           server_host,
                           server_port,
                           docker_image=None):
    """Creates jobspec for cloud-to-cloud interop test"""
    cmdline = bash_login_cmdline(
        language.client_cmd([
            '--server_host_override=foo.test.google.fr', '--use_tls=true',
            '--use_test_ca=true',
            '--test_case=%s' % test_case,
            '--server_host=%s' % server_host,
            '--server_port=%s' % server_port
        ]))
    cwd = language.client_cwd
    environ = language.global_env()
    if docker_image:
        container_name = dockerjob.random_name('interop_client_%s' %
                                               language.safename)
        cmdline = docker_run_cmdline(
            cmdline,
            image=docker_image,
            environ=environ,
            cwd=cwd,
            docker_args=['--net=host', '--name', container_name])
        cwd = None

    test_job = jobset.JobSpec(cmdline=cmdline,
                              cwd=cwd,
                              environ=environ,
                              shortname='cloud_to_cloud:%s:%s_server:%s' %
                              (language, server_name, test_case),
                              timeout_seconds=_TEST_TIMEOUT,
                              flake_retries=5 if args.allow_flakes else 0,
                              timeout_retries=2 if args.allow_flakes else 0,
                              kill_handler=_job_kill_handler)
    if docker_image:
        test_job.container_name = container_name
    return test_job
Example #16
def create_qpsworker_job(language,
                         shortname=None,
                         port=10000,
                         remote_host=None):
    cmdline = language.worker_cmdline() + ['--driver_port=%s' % port]
    if remote_host:
        user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
        cmdline = [
            'ssh',
            str(user_at_host),
            'cd ~/performance_workspace/grpc/ && %s' % ' '.join(cmdline)
        ]
        host_and_port = '%s:%s' % (remote_host, port)
    else:
        host_and_port = 'localhost:%s' % port

    jobspec = jobset.JobSpec(
        cmdline=cmdline,
        shortname=shortname,
        timeout_seconds=5 * 60,  # workers get restarted after each scenario
        verbose_success=True)
    return QpsWorkerJob(jobspec, language, host_and_port)
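A hedged sketch tying this to Example #3: assuming QpsWorkerJob exposes the host_and_port it was constructed with, the collected values become the QPS_WORKERS list:

qpsworker_jobs = [
    create_qpsworker_job(lang, shortname='qps_worker_%s' % lang, port=10000 + i)
    for i, lang in enumerate(languages)]  # 'languages' is hypothetical here
workers = [job.host_and_port for job in qpsworker_jobs]  # assumed attribute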
Example #17
def prepare_remote_hosts(hosts):
  """Prepares remote hosts."""
  prepare_jobs = []
  for host in hosts:
    user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, host)
    prepare_jobs.append(
        jobset.JobSpec(
            cmdline=['tools/run_tests/performance/remote_host_prepare.sh'],
            shortname='remote_host_prepare.%s' % host,
            environ={'USER_AT_HOST': user_at_host},
            timeout_seconds=5*60))
  jobset.message('START', 'Preparing remote hosts.', do_newline=True)
  num_failures, _ = jobset.run(
      prepare_jobs, newline_on_success=True, maxjobs=10)
  if num_failures == 0:
    jobset.message('SUCCESS',
                   'Remote hosts ready to start build.',
                   do_newline=True)
  else:
    jobset.message('FAILED', 'Failed to prepare remote hosts.',
                   do_newline=True)
    sys.exit(1)
Example #18
def find_test_cases(lang, runtime, release, suite_name):
    """Returns the list of test cases from testcase files per lang/release."""
    file_tmpl = os.path.join(os.path.dirname(__file__), 'testcases/%s__%s')
    testcase_release = release
    filename_prefix = lang
    if lang == 'csharp':
        filename_prefix = runtime
    if not os.path.exists(file_tmpl % (filename_prefix, release)):
        testcase_release = 'master'
    testcases = file_tmpl % (filename_prefix, testcase_release)

    job_spec_list = []
    try:
        with open(testcases) as f:
            # Only lines starting with 'docker run' are test cases.
            for line in f.readlines():
                if line.startswith('docker run'):
                    m = re.search('--test_case=(.*)"', line)
                    shortname = m.group(1) if m else 'unknown_test'
                    m = re.search(
                        '--server_host_override=(.*).sandbox.googleapis.com',
                        line)
                    server = m.group(1) if m else 'unknown_server'
                    spec = jobset.JobSpec(
                        cmdline=line,
                        shortname='%s:%s:%s:%s' %
                        (suite_name, lang, server, shortname),
                        timeout_seconds=_TEST_TIMEOUT,
                        shell=True,
                        flake_retries=5 if args.allow_flakes else 0)
                    job_spec_list.append(spec)
            jobset.message('START',
                           'Loaded %s tests from %s' %
                           (len(job_spec_list), testcases),
                           do_newline=True)
    except IOError as err:
        jobset.message('FAILED', err, do_newline=True)
    return job_spec_list
Example #19
make_targets = {}
for l in languages:
  makefile = l.makefile_name()
  make_targets[makefile] = make_targets.get(makefile, set()).union(
      set(l.make_targets()))

def build_step_environ(cfg):
  environ = {'CONFIG': cfg}
  msbuild_cfg = _MSBUILD_CONFIG.get(cfg)
  if msbuild_cfg:
    environ['MSBUILD_CONFIG'] = msbuild_cfg
  return environ

build_steps = list(set(
                   jobset.JobSpec(cmdline, environ=build_step_environ(build_config), flake_retries=5)
                   for l in languages
                   for cmdline in l.pre_build_steps()))
if make_targets:
  make_commands = itertools.chain.from_iterable(
      make_jobspec(build_config, list(targets), makefile)
      for (makefile, targets) in make_targets.items())
  build_steps.extend(set(make_commands))
build_steps.extend(set(
                   jobset.JobSpec(cmdline, environ=build_step_environ(build_config), timeout_seconds=None)
                   for l in languages
                   for cmdline in l.build_steps()))

post_tests_steps = list(set(
                        jobset.JobSpec(cmdline, environ=build_step_environ(build_config))
                        for l in languages
                        for cmdline in l.post_tests_steps()))
runs_per_test = args.runs_per_test
Example #20
 def job_spec(self, binary, hash_targets):
     return jobset.JobSpec(
         cmdline=['valgrind', '--tool=%s' % self.tool] + self.args +
         [binary],
         shortname='valgrind %s' % binary,
         hash_targets=None)
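A hedged trace of what the spec expands to, assuming tool='memcheck' and args=['--leak-check=full'] with a hypothetical binary:

# cmdline   -> ['valgrind', '--tool=memcheck', '--leak-check=full',
#               'bins/valgrind/end2end_test']
# shortname -> 'valgrind bins/valgrind/end2end_test'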
Example #21
base_cmd = ['python2.7', 'tools/buildgen/mako_renderer.py']
cmd = base_cmd[:]
for plugin in plugins:
    cmd.append('-p')
    cmd.append(plugin)
for js in json:
    cmd.append('-d')
    cmd.append(js)
cmd.append('-w')
preprocessed_build = '.preprocessed_build'
cmd.append(preprocessed_build)
if args.output_merged is not None:
    cmd.append('-M')
    cmd.append(args.output_merged)
pre_jobs.append(
    jobset.JobSpec(cmd, shortname='preprocess', timeout_seconds=None))

jobs = []
for template in reversed(sorted(templates)):
    root, f = os.path.split(template)
    if os.path.splitext(f)[1] == '.template':
        out_dir = '.' + root[len('templates'):]
        out = out_dir + '/' + os.path.splitext(f)[0]
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        cmd = base_cmd[:]
        cmd.append('-P')
        cmd.append(preprocessed_build)
        cmd.append('-o')
        if test is None:
            cmd.append(out)
Example #22
                  nargs='+',
                  default=sorted(_LANGUAGES.keys()))
args = argp.parse_args()

# grab config
run_configs = set(_CONFIGS[cfg] for cfg in itertools.chain.from_iterable(
    _CONFIGS.keys() if x == 'all' else [x] for x in args.config))
build_configs = set(cfg.build_config for cfg in run_configs)

make_targets = []
languages = set(_LANGUAGES[l] for l in args.language)
build_steps = [
    jobset.JobSpec([
        'make', '-j',
        '%d' % (multiprocessing.cpu_count() + 1),
        'CONFIG=%s' % cfg
    ] + list(
        set(itertools.chain.from_iterable(l.make_targets()
                                          for l in languages))))
    for cfg in build_configs
] + list(
    set(
        jobset.JobSpec(cmdline) for l in languages
        for cmdline in l.build_steps()))
one_run = set(spec for config in run_configs for language in args.language
              for spec in _LANGUAGES[language].test_specs(config)
              if re.search(args.regex, spec.shortname))

runs_per_test = args.runs_per_test
forever = args.forever
Example #23
 def make_jobspec(cfg, targets):
   return jobset.JobSpec(['make.bat', 'CONFIG=%s' % cfg] + targets,
                         cwd='vsprojects', shell=True)
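A hedged usage sketch: with shell=True and cwd='vsprojects', the spec runs make.bat from the Visual Studio projects directory (config and target names are hypothetical):

spec = make_jobspec('dbg', ['grpc_test_util'])
# executes: make.bat CONFIG=dbg grpc_test_util  (in vsprojects/)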
Example #24
build_configs = set(cfg.build_config for cfg in run_configs)

make_targets = []
languages = set(_LANGUAGES[l] for l in args.language)

if len(build_configs) > 1:
    for language in languages:
        if not language.supports_multi_config():
            print('%s does not support multiple build configurations' % language)
            sys.exit(1)

build_steps = [
    jobset.JobSpec([
        'make', '-j',
        '%d' % (multiprocessing.cpu_count() + 1),
        'EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=%f' % args.slowdown,
        'CONFIG=%s' % cfg
    ] + list(
        set(itertools.chain.from_iterable(l.make_targets()
                                          for l in languages))))
    for cfg in build_configs
] + list(
    set(
        jobset.JobSpec(cmdline, environ={'CONFIG': cfg})
        for cfg in build_configs for l in languages
        for cmdline in l.build_steps()))
one_run = set(spec for config in run_configs for language in args.language
              for spec in _LANGUAGES[language].test_specs(config, args.travis)
              if re.search(args.regex, spec.shortname))

runs_per_test = args.runs_per_test
forever = args.forever
Example #25
    return jobset.JobSpec(['make.bat', 'CONFIG=%s' % cfg] + targets,
                          cwd='vsprojects', shell=True)
else:
  def make_jobspec(cfg, targets):
    return jobset.JobSpec(['make',
                           '-j', '%d' % (multiprocessing.cpu_count() + 1),
                           'EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=%f' %
                               args.slowdown,
                           'CONFIG=%s' % cfg] + targets)

build_steps = [make_jobspec(cfg,
                            list(set(itertools.chain.from_iterable(
                                         l.make_targets() for l in languages))))
               for cfg in build_configs]
build_steps.extend(set(
                   jobset.JobSpec(cmdline, environ={'CONFIG': cfg})
                   for cfg in build_configs
                   for l in languages
                   for cmdline in l.build_steps()))
one_run = set(
    spec
    for config in run_configs
    for language in languages
    for spec in language.test_specs(config, args.travis)
    if re.search(args.regex, spec.shortname))

runs_per_test = args.runs_per_test
forever = args.forever


class TestCache(object):
Example #26
                args.slowdown,
                'CONFIG=%s' % cfg
            ] + targets,
                           timeout_seconds=30 * 60)
        ]


make_targets = {}
for l in languages:
    makefile = l.makefile_name()
    make_targets[makefile] = make_targets.get(makefile, set()).union(
        set(l.make_targets()))

build_steps = list(
    set(
        jobset.JobSpec(cmdline, environ={'CONFIG': cfg}, flake_retries=5)
        for cfg in build_configs for l in languages
        for cmdline in l.pre_build_steps()))
if make_targets:
    make_commands = itertools.chain.from_iterable(
        make_jobspec(cfg, list(targets), makefile) for cfg in build_configs
        for (makefile, targets) in make_targets.items())
    build_steps.extend(set(make_commands))
build_steps.extend(
    set(
        jobset.JobSpec(
            cmdline, environ={'CONFIG': cfg}, timeout_seconds=10 * 60)
        for cfg in build_configs for l in languages
        for cmdline in l.build_steps()))

runs_per_test = args.runs_per_test
Example #27
 def job_spec(self, cmdline, hash_targets):
   return jobset.JobSpec(cmdline=['valgrind', '--tool=%s' % self.tool] +
                         self.args + cmdline,
                         shortname='valgrind %s' % cmdline[0],
                         hash_targets=None)
Example #28
 def job_spec(self, binary, hash_targets):
     return jobset.JobSpec(
         cmdline=[binary],
         environ=self.environ,
         hash_targets=hash_targets if self.allow_hashing else None)
Example #29
 def make_jobspec(cfg, targets):
   return jobset.JobSpec(['make',
                          '-j', '%d' % (multiprocessing.cpu_count() + 1),
                          'EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=%f' %
                              args.slowdown,
                          'CONFIG=%s' % cfg] + targets)
Example #30
    '--config=' + config,
]

if args.fix:
    cmdline.append('--fix')

if args.only_changed:
    orig_files = set(args.files)
    actual_files = []
    output = subprocess.check_output(
        ['git', 'diff', 'origin/master', 'HEAD', '--name-only'])
    for line in output.decode('ascii').splitlines(False):
        if line in orig_files:
            print("check: %s" % line)
            actual_files.append(line)
        else:
            print("skip: %s - not in the build" % line)
    args.files = actual_files

jobs = []
for filename in args.files:
    jobs.append(
        jobset.JobSpec(
            cmdline + [filename],
            shortname=filename,
            timeout_seconds=15 * 60,
        ))

num_fails, res_set = jobset.run(jobs, maxjobs=args.jobs, quiet_success=True)
sys.exit(num_fails)