def add_files_to_image(image, with_files, label=None):
    """Add files to a docker image.

    image: docker image name, e.g. grpc_interop_java:26328ad8
    with_files: additional files to include in the docker image.
    label: label string to attach to the image.
    """
    tag_idx = image.find(':')
    if tag_idx == -1:
        jobset.message('FAILED',
                       'invalid docker image %s' % image,
                       do_newline=True)
        sys.exit(1)
    orig_tag = '%s_' % image
    subprocess.check_output(['docker', 'tag', image, orig_tag])

    lines = ['FROM ' + orig_tag]
    if label:
        lines.append('LABEL %s' % label)

    temp_dir = tempfile.mkdtemp()
    atexit.register(lambda: subprocess.call(['rm', '-rf', temp_dir]))

    # Copy with_files inside the tmp directory, which will be the docker
    # build context.
    for f in with_files:
        shutil.copy(f, temp_dir)
        lines.append('COPY %s %s/' % (os.path.basename(f), _BUILD_INFO))

    # Create a Dockerfile.
    with open(os.path.join(temp_dir, 'Dockerfile'), 'w') as f:
        f.write('\n'.join(lines))

    jobset.message('START', 'Repackaging %s' % image, do_newline=True)
    build_cmd = ['docker', 'build', '--rm', '--tag', image, temp_dir]
    subprocess.check_output(build_cmd)
    dockerjob.remove_image(orig_tag, skip_nonexistent=True)
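# Illustrative call of add_files_to_image, mirroring how the release build
# packages a commit log into a freshly built image. A sketch only: the image
# tag and file path below are hypothetical.
#
#   add_files_to_image('grpc_interop_go:v1.12.0',
#                      ['/tmp/grpc-go/commit_log'],
#                      label='release=v1.12.0')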
def checkout_grpc_stack(lang, release):
    """Invokes 'git checkout' for the lang/release and returns the directory created."""
    assert args.git_checkout and args.git_checkout_root
    if not os.path.exists(args.git_checkout_root):
        os.makedirs(args.git_checkout_root)

    repo = client_matrix.get_github_repo(lang)
    # Get the subdir name part of repo.
    # For example, '[email protected]:grpc/grpc-go.git' should use 'grpc-go'.
    repo_dir = os.path.splitext(os.path.basename(repo))[0]
    stack_base = os.path.join(args.git_checkout_root, repo_dir)

    # Assume the directory is reusable for git checkout.
    if not os.path.exists(stack_base):
        subprocess.check_call(['git', 'clone', '--recursive', repo],
                              cwd=os.path.dirname(stack_base))

    # git checkout.
    jobset.message('START',
                   'git checkout %s from %s' % (release, stack_base),
                   do_newline=True)
    # We should NEVER do checkout on the current tree!
    assert not os.path.dirname(__file__).startswith(stack_base)
    output = subprocess.check_output(['git', 'checkout', release],
                                     cwd=stack_base,
                                     stderr=subprocess.STDOUT)
    commit_log = subprocess.check_output(['git', 'log', '-1'], cwd=stack_base)
    jobset.message('SUCCESS',
                   'git checkout',
                   output + commit_log,
                   do_newline=True)

    # Write git log to commit_log so it can be packaged with the docker image.
    with open(os.path.join(stack_base, 'commit_log'), 'w') as f:
        f.write(commit_log)
    return stack_base
def maybe_apply_patches_on_git_tag(stack_base, lang, release):
    files_to_patch = []
    for release_info in client_matrix.LANG_RELEASE_MATRIX[lang]:
        if client_matrix.get_release_tag_name(release_info) == release:
            if release_info[release] is not None:
                files_to_patch = release_info[release].get('patch')
                break
    if not files_to_patch:
        return
    patch_file_relative_path = 'patches/%s_%s/git_repo.patch' % (lang, release)
    patch_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), patch_file_relative_path))
    if not os.path.exists(patch_file):
        jobset.message('FAILED',
                       'expected patch file |%s| to exist' % patch_file)
        sys.exit(1)
    subprocess.check_output(['git', 'apply', patch_file],
                            cwd=stack_base,
                            stderr=subprocess.STDOUT)
    for repo_relative_path in files_to_patch:
        subprocess.check_output(['git', 'add', repo_relative_path],
                                cwd=stack_base,
                                stderr=subprocess.STDOUT)
    subprocess.check_output(
        ['git', 'commit', '-m',
         ('Hack performed on top of %s git tag in order to build and '
          'run the %s interop tests on that tag.' % (lang, release))],
        cwd=stack_base,
        stderr=subprocess.STDOUT)
def _get_test_images_for_lang(lang, release_arg, image_path_prefix):
    """Find docker images for a language across releases and runtimes.

    Returns dictionary of list of (<tag>, <image-full-path>) keyed by runtime.
    """
    if release_arg == 'all':
        # Use all defined releases for the given language.
        releases = client_matrix.get_release_tags(lang)
    else:
        # Look for a particular release.
        if release_arg not in client_matrix.get_release_tags(lang):
            jobset.message('SKIPPED',
                           'release %s for %s is not defined' %
                           (release_arg, lang),
                           do_newline=True)
            return {}
        releases = [release_arg]

    # Image tuples keyed by runtime.
    images = {}
    for tag in releases:
        for runtime in client_matrix.get_runtimes_for_lang_release(lang, tag):
            image_name = '%s/grpc_interop_%s:%s' % (image_path_prefix,
                                                    runtime, tag)
            image_tuple = (tag, image_name)
            if runtime not in images:
                images[runtime] = []
            images[runtime].append(image_tuple)
    return images
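# A minimal sketch of the shape returned by _get_test_images_for_lang; the
# language, release tag, and registry prefix below are hypothetical values.
def _print_test_images_demo():
    images = _get_test_images_for_lang('go', 'v1.12.0', 'gcr.io/grpc-testing')
    # Expected shape: {<runtime>: [(<tag>, <image-full-path>), ...], ...}
    for runtime, image_tuples in sorted(images.items()):
        for tag, image in image_tuples:
            print('%s: %s (tag %s)' % (runtime, image, tag))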
def test_specs(self, config, travis):
    job_specifications = []
    for test in self._tests:
        command = None
        short_name = None
        if "module" in test:
            command = ["tools/run_tests/run_python.sh", "-m", test["module"]]
            short_name = test["module"]
        elif "file" in test:
            command = ["tools/run_tests/run_python.sh", test["file"]]
            short_name = test["file"]
        else:
            raise ValueError("expected input to be a module or file to run "
                             "unittests from")
        for python_version in test["pythonVersions"]:
            if python_version in self._has_python_versions:
                environment = dict(_FORCE_ENVIRON_FOR_WRAPPERS)
                environment["PYVER"] = python_version
                job_specifications.append(
                    config.job_spec(command,
                                    None,
                                    environ=environment,
                                    shortname=short_name))
            else:
                jobset.message(
                    "WARNING",
                    "Could not find Python {}; skipping test".format(
                        python_version),
                    "{}\n".format(command),
                    do_newline=True)
    return job_specifications
def find_test_cases(lang, release):
    """Returns the list of test cases from testcase files per lang/release."""
    file_tmpl = os.path.join(os.path.dirname(__file__), 'testcases/%s__%s')
    if not os.path.exists(file_tmpl % (lang, release)):
        release = 'master'
    testcases = file_tmpl % (lang, release)
    if lang in _loaded_testcases and release in _loaded_testcases[lang]:
        return _loaded_testcases[lang][release]

    job_spec_list = []
    try:
        with open(testcases) as f:
            # Only lines starting with 'docker run' are test cases.
            for line in f.readlines():
                if line.startswith('docker run'):
                    m = re.search('--test_case=(.*)"', line)
                    shortname = m.group(1) if m else 'unknown_test'
                    spec = jobset.JobSpec(cmdline=line,
                                          shortname=shortname,
                                          timeout_seconds=_TEST_TIMEOUT,
                                          shell=True)
                    job_spec_list.append(spec)
            jobset.message('START',
                           'Loaded %s tests from %s' %
                           (len(job_spec_list), testcases),
                           do_newline=True)
    except IOError as err:
        jobset.message('FAILED', err, do_newline=True)
    if lang not in _loaded_testcases:
        _loaded_testcases[lang] = {}
    _loaded_testcases[lang][release] = job_spec_list
    return job_spec_list
def test_specs(self, config, travis):
    job_specifications = []
    for test in self._tests:
        command = None
        short_name = None
        if 'module' in test:
            command = ['tools/run_tests/run_python.sh', '-m', test['module']]
            short_name = test['module']
        elif 'file' in test:
            command = ['tools/run_tests/run_python.sh', test['file']]
            short_name = test['file']
        else:
            raise ValueError('expected input to be a module or file to run '
                             'unittests from')
        for python_version in test['pythonVersions']:
            if python_version in self._has_python_versions:
                environment = dict(_FORCE_ENVIRON_FOR_WRAPPERS)
                environment['PYVER'] = python_version
                job_specifications.append(
                    config.job_spec(command,
                                    None,
                                    environ=environment,
                                    shortname=short_name))
            else:
                jobset.message(
                    'WARNING',
                    'Could not find Python {}; skipping test'.format(
                        python_version),
                    '{}\n'.format(command),
                    do_newline=True)
    return job_specifications
def register(self, args, process_num=1):
    jobset.message('START',
                   '[%s] register %s' % (process_num, ' '.join(args)))
    id = len(self.groups) + 1
    info = dict(id=id, args=args, process_num=process_num)
    self.groups.append(info)
    jobset.message('SUCCESS', 'registered %s' % args)
def build_all_images_for_lang(lang):
    """Build all docker images for a language across releases and runtimes."""
    if not args.git_checkout:
        if args.release != 'master':
            print('WARNING: --release is set but will be ignored\n')
        releases = ['master']
    else:
        if args.release == 'all':
            releases = client_matrix.LANG_RELEASE_MATRIX[lang]
        else:
            # Build a particular release.
            if args.release not in ['master'] + client_matrix.LANG_RELEASE_MATRIX[lang]:
                jobset.message('SKIPPED',
                               '%s for %s is not defined' %
                               (args.release, lang),
                               do_newline=True)
                return []
            releases = [args.release]

    images = []
    for release in releases:
        images += build_all_images_for_release(lang, release)
    jobset.message('SUCCESS',
                   'All docker images built for %s at %s.' % (lang, releases),
                   do_newline=True)
    return images
def _get_test_images_for_lang(lang, release_arg, image_path_prefix):
    """Find docker images for a language across releases and runtimes.

    Returns dictionary of list of (<tag>, <image-full-path>) keyed by runtime.
    """
    if release_arg == 'all':
        # Use all defined releases for the given language.
        releases = client_matrix.get_release_tags(lang)
    else:
        # Look for a particular release.
        if release_arg not in client_matrix.get_release_tags(lang):
            jobset.message('SKIPPED',
                           'release %s for %s is not defined' %
                           (release_arg, lang),
                           do_newline=True)
            return {}
        releases = [release_arg]

    # Image tuples keyed by runtime.
    images = {}
    for tag in releases:
        for runtime in client_matrix.get_runtimes_for_lang_release(lang, tag):
            image_name = '%s/grpc_interop_%s:%s' % (image_path_prefix,
                                                    runtime, tag)
            image_tuple = (tag, image_name)
            if runtime not in images:
                images[runtime] = []
            images[runtime].append(image_tuple)
    return images
def _test_release(lang, runtime, release, image, xml_report_tree, skip_tests):
    total_num_failures = 0
    suite_name = '%s__%s_%s' % (lang, runtime, release)
    job_spec_list = _generate_test_case_jobspecs(lang, runtime, release,
                                                 suite_name)
    if not job_spec_list:
        jobset.message('FAILED', 'No test cases were found.', do_newline=True)
        total_num_failures += 1
    else:
        num_failures, resultset = jobset.run(job_spec_list,
                                             newline_on_success=True,
                                             add_env={'docker_image': image},
                                             maxjobs=args.jobs,
                                             skip_jobs=skip_tests)
        if args.bq_result_table and resultset:
            upload_test_results.upload_interop_results_to_bq(
                resultset, args.bq_result_table)
        if skip_tests:
            jobset.message('FAILED', 'Tests were skipped', do_newline=True)
            total_num_failures += 1
        if num_failures:
            total_num_failures += num_failures

        report_utils.append_junit_xml_results(xml_report_tree, resultset,
                                              'grpc_interop_matrix',
                                              suite_name, str(uuid.uuid4()))
    return total_num_failures
def maybe_apply_patches_on_git_tag(stack_base, lang, release):
    files_to_patch = []
    for release_info in client_matrix.LANG_RELEASE_MATRIX[lang]:
        if client_matrix.get_release_tag_name(release_info) == release:
            if release_info[release] is not None:
                files_to_patch = release_info[release].get('patch')
                break
    if not files_to_patch:
        return
    patch_file_relative_path = 'patches/%s_%s/git_repo.patch' % (lang, release)
    patch_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), patch_file_relative_path))
    if not os.path.exists(patch_file):
        jobset.message('FAILED',
                       'expected patch file |%s| to exist' % patch_file)
        sys.exit(1)
    subprocess.check_output(['git', 'apply', patch_file],
                            cwd=stack_base,
                            stderr=subprocess.STDOUT)
    for repo_relative_path in files_to_patch:
        subprocess.check_output(['git', 'add', repo_relative_path],
                                cwd=stack_base,
                                stderr=subprocess.STDOUT)
    subprocess.check_output(
        ['git', 'commit', '-m',
         ('Hack performed on top of %s git tag in order to build and '
          'run the %s interop tests on that tag.' % (lang, release))],
        cwd=stack_base,
        stderr=subprocess.STDOUT)
def build_all_images_for_lang(lang):
    """Build all docker images for a language across releases and runtimes."""
    if not args.git_checkout:
        if args.release != 'master':
            print('Cannot use --release without also enabling --git_checkout.\n')
            sys.exit(1)
        releases = [args.release]
    else:
        if args.release == 'all':
            releases = client_matrix.get_release_tags(lang)
        else:
            # Build a particular release.
            if args.release not in ['master'] + client_matrix.get_release_tags(lang):
                jobset.message('SKIPPED',
                               '%s for %s is not defined' %
                               (args.release, lang),
                               do_newline=True)
                return []
            releases = [args.release]

    images = []
    for release in releases:
        images += build_all_images_for_release(lang, release)
    jobset.message('SUCCESS',
                   'All docker images built for %s at %s.' % (lang, releases),
                   do_newline=True)
    return images
def find_test_cases(lang, release, suite_name):
    """Returns the list of test cases from testcase files per lang/release."""
    file_tmpl = os.path.join(os.path.dirname(__file__), 'testcases/%s__%s')
    testcase_release = release
    if not os.path.exists(file_tmpl % (lang, release)):
        testcase_release = 'master'
    testcases = file_tmpl % (lang, testcase_release)

    job_spec_list = []
    try:
        with open(testcases) as f:
            # Only lines starting with 'docker run' are test cases.
            for line in f.readlines():
                if line.startswith('docker run'):
                    m = re.search('--test_case=(.*)"', line)
                    shortname = m.group(1) if m else 'unknown_test'
                    m = re.search(
                        '--server_host_override=(.*).sandbox.googleapis.com',
                        line)
                    server = m.group(1) if m else 'unknown_server'
                    spec = jobset.JobSpec(
                        cmdline=line,
                        shortname='%s:%s:%s:%s' % (suite_name, lang, server,
                                                   shortname),
                        timeout_seconds=_TEST_TIMEOUT,
                        shell=True,
                        flake_retries=5 if args.allow_flakes else 0)
                    job_spec_list.append(spec)
            jobset.message('START',
                           'Loaded %s tests from %s' %
                           (len(job_spec_list), testcases),
                           do_newline=True)
    except IOError as err:
        jobset.message('FAILED', err, do_newline=True)
    return job_spec_list
def build_all_images_for_lang(lang):
    """Build all docker images for a language across releases and runtimes."""
    if not args.git_checkout:
        if args.release != 'master':
            print('WARNING: --release is set but will be ignored\n')
        releases = ['master']
    else:
        if args.release == 'all':
            releases = client_matrix.get_release_tags(lang)
        else:
            # Build a particular release.
            if args.release not in ['master'] + client_matrix.get_release_tags(lang):
                jobset.message('SKIPPED',
                               '%s for %s is not defined' %
                               (args.release, lang),
                               do_newline=True)
                return []
            releases = [args.release]

    images = []
    for release in releases:
        images += build_all_images_for_release(lang, release)
    jobset.message('SUCCESS',
                   'All docker images built for %s at %s.' % (lang, releases),
                   do_newline=True)
    return images
def run():
    pid = os.getpid()
    msg = str(pid)
    parser = argparse.ArgumentParser(description='Run Server on PORT')
    parser.add_argument('-P',
                        metavar='P',
                        type=int,
                        nargs='+',
                        help='an integer for gRPC Server port')
    args = parser.parse_args()
    if args and args.P:
        port = args.P[-1]
        jobset.message('START', 'Run hello on port %s' % port, do_newline=True)
    c = get_client()
    start = time.time()
    tt = int(total / cpu_count)
    for i in range(tt):
        r = c.hello(msg)
        assert msg in str(r)
    end = time.time()
    diff = end - start
    qps = total / diff
    jobset.message('SUCCESS',
                   'Done hello total=%s, time diff=%s, qps=%s' %
                   (total, diff, qps),
                   do_newline=True)
def find_test_cases(lang, runtime, release, suite_name):
    """Returns the list of test cases from testcase files per lang/release."""
    testcase_dir = os.path.join(os.path.dirname(__file__), 'testcases')
    filename_prefix = lang
    if lang == 'csharp':
        filename_prefix = runtime
    # Check to see if we need to use a particular version of test cases.
    lang_version = '%s_%s' % (filename_prefix, release)
    if lang_version in client_matrix.TESTCASES_VERSION_MATRIX:
        testcases = os.path.join(
            testcase_dir, client_matrix.TESTCASES_VERSION_MATRIX[lang_version])
    else:
        testcases = os.path.join(testcase_dir, '%s__master' % filename_prefix)

    job_spec_list = []
    try:
        with open(testcases) as f:
            # Only lines starting with 'docker run' are test cases.
            for line in f.readlines():
                if line.startswith('docker run'):
                    m = re.search('--test_case=(.*)"', line)
                    shortname = m.group(1) if m else 'unknown_test'
                    m = re.search(
                        '--server_host_override=(.*).sandbox.googleapis.com',
                        line)
                    server = m.group(1) if m else 'unknown_server'

                    # If the server_host arg is not None, replace the original
                    # server_host with the one provided, or append it to the
                    # end of the command if server_host does not appear
                    # originally.
                    if args.server_host:
                        if line.find('--server_host=') > -1:
                            line = re.sub(
                                '--server_host=[^ ]*',
                                '--server_host=%s' % args.server_host, line)
                        else:
                            line = '%s --server_host=%s"' % (
                                line[:-1], args.server_host)
                        print(line)

                    spec = jobset.JobSpec(
                        cmdline=line,
                        shortname='%s:%s:%s:%s' % (suite_name, lang, server,
                                                   shortname),
                        timeout_seconds=_TEST_TIMEOUT,
                        shell=True,
                        flake_retries=5 if args.allow_flakes else 0)
                    job_spec_list.append(spec)
            jobset.message('START',
                           'Loaded %s tests from %s' %
                           (len(job_spec_list), testcases),
                           do_newline=True)
    except IOError as err:
        jobset.message('FAILED', err, do_newline=True)
    return job_spec_list
def find_test_cases(lang, runtime, release, suite_name):
    """Returns the list of test cases from testcase files per lang/release."""
    testcase_dir = os.path.join(os.path.dirname(__file__), 'testcases')
    filename_prefix = lang
    if lang == 'csharp':
        filename_prefix = runtime
    # Check to see if we need to use a particular version of test cases.
    lang_version = '%s_%s' % (filename_prefix, release)
    if lang_version in client_matrix.TESTCASES_VERSION_MATRIX:
        testcases = os.path.join(
            testcase_dir, client_matrix.TESTCASES_VERSION_MATRIX[lang_version])
    else:
        testcases = os.path.join(testcase_dir, '%s__master' % filename_prefix)

    job_spec_list = []
    try:
        with open(testcases) as f:
            # Only lines starting with 'docker run' are test cases.
            for line in f.readlines():
                if line.startswith('docker run'):
                    m = re.search('--test_case=(.*)"', line)
                    shortname = m.group(1) if m else 'unknown_test'
                    m = re.search(
                        '--server_host_override=(.*).sandbox.googleapis.com',
                        line)
                    server = m.group(1) if m else 'unknown_server'

                    # If the server_host arg is not None, replace the original
                    # server_host with the one provided, or append it to the
                    # end of the command if server_host does not appear
                    # originally.
                    if args.server_host:
                        if line.find('--server_host=') > -1:
                            line = re.sub(
                                '--server_host=[^ ]*',
                                '--server_host=%s' % args.server_host, line)
                        else:
                            line = '%s --server_host=%s"' % (
                                line[:-1], args.server_host)
                        print(line)

                    spec = jobset.JobSpec(
                        cmdline=line,
                        shortname='%s:%s:%s:%s' % (suite_name, lang, server,
                                                   shortname),
                        timeout_seconds=_TEST_TIMEOUT,
                        shell=True,
                        flake_retries=5 if args.allow_flakes else 0)
                    job_spec_list.append(spec)
            jobset.message('START',
                           'Loaded %s tests from %s' %
                           (len(job_spec_list), testcases),
                           do_newline=True)
    except IOError as err:
        jobset.message('FAILED', err, do_newline=True)
    return job_spec_list
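# A runnable sketch of the --test_case / --server_host_override extraction
# used by the find_test_cases variants above. The sample 'docker run' line
# is a hypothetical example, not taken from a real testcases file.
import re

sample = ('docker run -i --rm=true grpc_interop_go:master '
          '--server_host_override=grpc-test.sandbox.googleapis.com '
          '--test_case=large_unary"')
m = re.search('--test_case=(.*)"', sample)
print(m.group(1) if m else 'unknown_test')  # -> large_unary
m = re.search('--server_host_override=(.*).sandbox.googleapis.com', sample)
print(m.group(1) if m else 'unknown_server')  # -> grpc-test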
def checkout_grpc_stack(lang, release):
    """Invokes 'git checkout' for the lang/release and returns the directory created."""
    assert args.git_checkout and args.git_checkout_root
    if not os.path.exists(args.git_checkout_root):
        os.makedirs(args.git_checkout_root)

    repo = client_matrix.get_github_repo(lang)
    # Get the subdir name part of repo.
    # For example, '[email protected]:grpc/grpc-go.git' should use 'grpc-go'.
    repo_dir = os.path.splitext(os.path.basename(repo))[0]
    stack_base = os.path.join(args.git_checkout_root, repo_dir)

    # Clean up the leftover repo dir if necessary.
    if not args.reuse_git_root and os.path.exists(stack_base):
        jobset.message('START', 'Removing git checkout root.', do_newline=True)
        shutil.rmtree(stack_base)

    if not os.path.exists(stack_base):
        subprocess.check_call(['git', 'clone', '--recursive', repo],
                              cwd=os.path.dirname(stack_base))

    # git checkout.
    jobset.message('START',
                   'git checkout %s from %s' % (release, stack_base),
                   do_newline=True)
    # We should NEVER do checkout on the current tree!
    assert not os.path.dirname(__file__).startswith(stack_base)
    output = subprocess.check_output(['git', 'checkout', release],
                                     cwd=stack_base,
                                     stderr=subprocess.STDOUT)
    maybe_apply_patches_on_git_tag(stack_base, lang, release)
    commit_log = subprocess.check_output(['git', 'log', '-1'], cwd=stack_base)
    jobset.message('SUCCESS',
                   'git checkout',
                   '%s: %s' % (str(output), commit_log),
                   do_newline=True)

    # git submodule update
    jobset.message('START',
                   'git submodule update --init at %s from %s' %
                   (release, stack_base),
                   do_newline=True)
    subprocess.check_call(['git', 'submodule', 'update', '--init'],
                          cwd=stack_base,
                          stderr=subprocess.STDOUT)
    jobset.message('SUCCESS',
                   'git submodule update --init',
                   '%s: %s' % (str(output), commit_log),
                   do_newline=True)

    # Write git log to commit_log so it can be packaged with the docker image.
    with open(os.path.join(stack_base, 'commit_log'), 'w') as f:
        f.write(commit_log)
    return stack_base
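# Illustrative driver for the checkout flow above (a sketch; the language and
# release tag are hypothetical, and `args` must already be parsed as in the
# surrounding script):
#
#   stack_base = checkout_grpc_stack('go', 'v1.12.0')
#   # The checkout already applies any per-release patch via
#   # maybe_apply_patches_on_git_tag() and leaves a 'commit_log' file in
#   # stack_base for add_files_to_image() to package.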
def build_steps(self):
    commands = []
    for python_version in self._build_python_versions:
        try:
            with open(os.devnull, "w") as output:
                subprocess.check_call(["which", "python" + python_version],
                                      stdout=output,
                                      stderr=output)
            commands.append(["tools/run_tests/build_python.sh",
                             python_version])
            self._has_python_versions.append(python_version)
        except:
            jobset.message("WARNING",
                           "Missing Python " + python_version,
                           do_newline=True)
    return commands
def _run_tests_for_lang(lang, runtime, images, xml_report_tree):
    """Find and run all test cases for a language.

    images is a list of (<release-tag>, <image-full-path>) tuple.
    """
    skip_tests = False
    if not _pull_images_for_lang(lang, images):
        jobset.message(
            'FAILED',
            'Image download failed. Skipping tests for language "%s"' % lang,
            do_newline=True)
        skip_tests = True

    total_num_failures = 0
    for release, image in images:
        suite_name = '%s__%s_%s' % (lang, runtime, release)
        job_spec_list = _generate_test_case_jobspecs(lang, runtime, release,
                                                     suite_name)
        if not job_spec_list:
            jobset.message('FAILED',
                           'No test cases were found.',
                           do_newline=True)
            total_num_failures += 1
            continue

        num_failures, resultset = jobset.run(job_spec_list,
                                             newline_on_success=True,
                                             add_env={'docker_image': image},
                                             maxjobs=args.jobs,
                                             skip_jobs=skip_tests)
        if args.bq_result_table and resultset:
            upload_test_results.upload_interop_results_to_bq(
                resultset, args.bq_result_table)
        if skip_tests:
            jobset.message('FAILED', 'Tests were skipped', do_newline=True)
            total_num_failures += 1
        elif num_failures:
            jobset.message('FAILED', 'Some tests failed', do_newline=True)
            total_num_failures += num_failures
        else:
            jobset.message('SUCCESS', 'All tests passed', do_newline=True)

        report_utils.append_junit_xml_results(xml_report_tree, resultset,
                                              'grpc_interop_matrix',
                                              suite_name, str(uuid.uuid4()))

    # Cleanup all downloaded docker images.
    for _, image in images:
        if not args.keep:
            _cleanup_docker_image(image)

    return total_num_failures
def _run_tests_for_lang(lang, runtime, images, xml_report_tree):
    """Find and run all test cases for a language.

    images is a list of (<release-tag>, <image-full-path>) tuple.
    """
    skip_tests = False
    if not _pull_images_for_lang(lang, images):
        jobset.message(
            'FAILED',
            'Image download failed. Skipping tests for language "%s"' % lang,
            do_newline=True)
        skip_tests = True

    total_num_failures = 0
    for release, image in images:
        suite_name = '%s__%s_%s' % (lang, runtime, release)
        job_spec_list = _generate_test_case_jobspecs(lang, runtime, release,
                                                     suite_name)
        if not job_spec_list:
            jobset.message('FAILED',
                           'No test cases were found.',
                           do_newline=True)
            total_num_failures += 1
            continue

        num_failures, resultset = jobset.run(job_spec_list,
                                             newline_on_success=True,
                                             add_env={'docker_image': image},
                                             maxjobs=args.jobs,
                                             skip_jobs=skip_tests)
        if args.bq_result_table and resultset:
            upload_test_results.upload_interop_results_to_bq(
                resultset, args.bq_result_table)
        if skip_tests:
            jobset.message('FAILED', 'Tests were skipped', do_newline=True)
            total_num_failures += 1
        elif num_failures:
            jobset.message('FAILED', 'Some tests failed', do_newline=True)
            total_num_failures += num_failures
        else:
            jobset.message('SUCCESS', 'All tests passed', do_newline=True)

        report_utils.append_junit_xml_results(xml_report_tree, resultset,
                                              'grpc_interop_matrix',
                                              suite_name, str(uuid.uuid4()))

    # Cleanup all downloaded docker images.
    for _, image in images:
        if not args.keep:
            _cleanup_docker_image(image)

    return total_num_failures
def build_steps(self):
    commands = []
    for python_version in self._build_python_versions:
        try:
            with open(os.devnull, 'w') as output:
                subprocess.check_call(['which', 'python' + python_version],
                                      stdout=output,
                                      stderr=output)
            commands.append(['tools/run_tests/build_python.sh',
                             python_version])
            self._has_python_versions.append(python_version)
        except:
            jobset.message('WARNING',
                           'Missing Python ' + python_version,
                           do_newline=True)
    return commands
def spawn(self):
    for group in self.groups:
        try:
            process_num = group.get('process_num')
            args = group.get('args')
            _ps = []
            for i in range(process_num):
                proc = subprocess.Popen(args, preexec_fn=os.setsid)
                _ps.append(proc)
            group.update(proc_list=_ps)
        except Exception as e:
            jobset.message('FAILED', str(e))
            return
def stop(self):
    jobset.message('START', 'Shutting down subprocesses')
    for p in self.processes:
        try:
            p.terminate()
            p.wait()
            try:
                os.killpg(p.pid, signal.SIGTERM)
            except OSError:
                pass
        except:
            pass
    self.running = False
    jobset.message('SUCCESS', 'Done', do_newline=True)
def run_collect_perf_profile_jobs(hosts_and_base_names, scenario_name):
    perf_report_jobs = []
    global profile_output_files
    for host_and_port in hosts_and_base_names:
        perf_base_name = hosts_and_base_names[host_and_port]
        output_filename = '%s-%s' % (scenario_name, perf_base_name)
        # From the base filename, create the .svg output filename.
        host = host_and_port.split(':')[0]
        profile_output_files.append('%s.svg' % output_filename)
        perf_report_jobs.append(
            perf_report_processor_job(host, perf_base_name, output_filename))

    jobset.message('START',
                   'Collecting perf reports from qps workers',
                   do_newline=True)
    failures, _ = jobset.run(perf_report_jobs,
                             newline_on_success=True,
                             maxjobs=1)
    jobset.message('END',
                   'Collecting perf reports from qps workers',
                   do_newline=True)
    return failures
def start(self):
    jobset.message('START', 'Starting workers in the background')
    prepare_jobs = []
    ports = [port + i for i in range(self.cpu_count)]
    for p in ports:
        exc = '%s/touch_test.py' % CURRENT_DIR
        prepare_jobs.append(([sys.executable, exc, '-P', str(p)],))
    processes = []
    for job in prepare_jobs:
        process = subprocess.Popen(job[0])
        processes.append(process)
    self.processes = processes
    jobset.message('SUCCESS', 'Running worker [cores=%s]' % self.cpu_count)
    self.running = True
def start(self):
    jobset.message('START', 'Starting workers in the background')
    prepare_jobs = []
    ports = [port + i for i in range(self.cpu_count)]
    for p in ports:
        exc = '%s/touch_test.py' % CURRENT_DIR
        prepare_jobs.append(([sys.executable, exc, '-P', str(p)],))
    processes = []
    for job in prepare_jobs:
        process = subprocess.Popen(job[0])
        processes.append(process)
    self.processes = processes
    jobset.message('SUCCESS', 'Running worker [cores=%s]' % self.cpu_count)
    self.running = True
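# Illustrative lifecycle of the worker-manager methods above (register/spawn/
# start/stop). A sketch only: the class name `WorkerManager` and the command
# line are hypothetical.
#
#   manager = WorkerManager()
#   manager.register([sys.executable, 'touch_test.py', '-P', '10086'],
#                    process_num=4)
#   manager.spawn()   # forks 4 processes, each in its own session group
#   ...               # run the benchmark against the workers
#   manager.stop()    # terminate each process (and its process group)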
def find_all_images_for_lang(lang):
    """Find docker images for a language across releases and runtimes.

    Returns dictionary of list of (<tag>, <image-full-path>) keyed by runtime.
    """
    # Find all defined releases.
    if args.release == 'all':
        releases = ['master'] + client_matrix.LANG_RELEASE_MATRIX[lang]
    else:
        # Look for a particular release.
        if args.release not in ['master'] + client_matrix.LANG_RELEASE_MATRIX[lang]:
            jobset.message('SKIPPED',
                           '%s for %s is not defined' % (args.release, lang),
                           do_newline=True)
            return []
        releases = [args.release]

    # Image tuples keyed by runtime.
    images = {}
    for runtime in client_matrix.LANG_RUNTIME_MATRIX[lang]:
        image_path = '%s/grpc_interop_%s' % (args.gcr_path, runtime)
        output = subprocess.check_output([
            'gcloud', 'beta', 'container', 'images', 'list-tags',
            '--format=json', image_path
        ])
        docker_image_list = json.loads(output)
        # All images should have a single tag or no tag.
        tags = [i['tags'][0] for i in docker_image_list if i['tags']]
        jobset.message('START',
                       'Found images for %s: %s' % (image_path, tags),
                       do_newline=True)
        skipped = len(docker_image_list) - len(tags)
        jobset.message('START',
                       'Skipped images (no-tag/unknown-tag): %d' % skipped,
                       do_newline=True)
        # Filter tags based on the releases.
        images[runtime] = [(tag, '%s:%s' % (image_path, tag))
                           for tag in tags if tag in releases]
    return images
def run_tests_for_lang(lang, runtime, images):
    """Find and run all test cases for a language.

    images is a list of (<release-tag>, <image-full-path>) tuple.
    """
    for image_tuple in images:
        release, image = image_tuple
        jobset.message('START', 'Testing %s' % image, do_newline=True)
        # Download the docker image before running each test case.
        subprocess.check_call(['gcloud', 'docker', '--', 'pull', image])
        _docker_images_cleanup.append(image)
        job_spec_list = find_test_cases(lang, release)
        num_failures, resultset = jobset.run(job_spec_list,
                                             newline_on_success=True,
                                             add_env={'docker_image': image},
                                             maxjobs=args.jobs)
        if num_failures:
            jobset.message('FAILED', 'Some tests failed', do_newline=True)
        else:
            jobset.message('SUCCESS', 'All tests passed', do_newline=True)

        report_utils.append_junit_xml_results(
            _xml_report_tree, resultset, 'grpc_interop_matrix',
            '%s__%s %s' % (lang, runtime, release), str(uuid.uuid4()))
def prepare_remote_hosts(hosts, prepare_local=False):
    """Prepares remote hosts (and maybe prepare localhost as well)."""
    prepare_timeout = 5 * 60
    prepare_jobs = []
    for host in hosts:
        user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, host)
        prepare_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/remote_host_prepare.sh'],
                shortname='remote_host_prepare.%s' % host,
                environ={'USER_AT_HOST': user_at_host},
                timeout_seconds=prepare_timeout))
    if prepare_local:
        # Prepare localhost as well.
        prepare_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/kill_workers.sh'],
                shortname='local_prepare',
                timeout_seconds=prepare_timeout))
    jobset.message('START', 'Preparing hosts.', do_newline=True)
    num_failures, _ = jobset.run(prepare_jobs,
                                 newline_on_success=True,
                                 maxjobs=10)
    if num_failures == 0:
        jobset.message('SUCCESS',
                       'Prepare step completed successfully.',
                       do_newline=True)
    else:
        jobset.message('FAILED',
                       'Failed to prepare remote hosts.',
                       do_newline=True)
        sys.exit(1)
def archive_repo(languages):
    """Archives local version of repo including submodules."""
    cmdline = ['tar', '-cf', '../grpc.tar', '../grpc/']
    if 'java' in languages:
        cmdline.append('../grpc-java')
    if 'go' in languages:
        cmdline.append('../grpc-go')

    archive_job = jobset.JobSpec(cmdline=cmdline,
                                 shortname='archive_repo',
                                 timeout_seconds=3 * 60)

    jobset.message('START', 'Archiving local repository.', do_newline=True)
    num_failures, _ = jobset.run([archive_job],
                                 newline_on_success=True,
                                 maxjobs=1)
    if num_failures == 0:
        jobset.message('SUCCESS',
                       'Archive with local repository created successfully.',
                       do_newline=True)
    else:
        jobset.message('FAILED',
                       'Failed to archive local repository.',
                       do_newline=True)
        sys.exit(1)
def _pull_images_for_lang(lang, images):
    """Pull all images for given lang from container registry."""
    jobset.message('START',
                   'Downloading images for language "%s"' % lang,
                   do_newline=True)
    download_specs = []
    for release, image in images:
        # Pull the image and warm it up. The first time an image is used with
        # "docker run", it takes time to unpack it, and that delay could later
        # fail our test cases.
        cmdline = [
            'time gcloud docker -- pull %s && time docker run --rm=true %s /bin/true'
            % (image, image)
        ]
        spec = jobset.JobSpec(cmdline=cmdline,
                              shortname='pull_image_%s' % (image),
                              timeout_seconds=_PULL_IMAGE_TIMEOUT_SECONDS,
                              shell=True)
        download_specs.append(spec)
    # Too many image downloads at once tend to get stuck.
    max_pull_jobs = min(args.jobs, _MAX_PARALLEL_DOWNLOADS)
    num_failures, resultset = jobset.run(download_specs,
                                         newline_on_success=True,
                                         maxjobs=max_pull_jobs)
    if num_failures:
        jobset.message('FAILED',
                       'Failed to download some images',
                       do_newline=True)
        return False
    else:
        jobset.message('SUCCESS',
                       'All images downloaded successfully.',
                       do_newline=True)
        return True
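# Illustrative wiring of the pull step into the per-language test driver
# (a sketch; the language is hypothetical, and `args`/`xml_report_tree` must
# already exist as in the surrounding script):
#
#   images = _get_test_images_for_lang('go', args.release, args.gcr_path)
#   for runtime, image_tuples in images.items():
#       # _run_tests_for_lang() invokes _pull_images_for_lang() itself and
#       # skips (and fails) the suite when any download fails.
#       _run_tests_for_lang('go', runtime, image_tuples, xml_report_tree)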
def build_on_remote_hosts(hosts,
                          languages=scenario_config.LANGUAGES.keys(),
                          build_local=False):
    """Builds performance worker on remote hosts (and maybe also locally)."""
    build_timeout = 15 * 60
    build_jobs = []
    for host in hosts:
        user_at_host = "%s@%s" % (_REMOTE_HOST_USERNAME, host)
        build_jobs.append(
            jobset.JobSpec(
                cmdline=["tools/run_tests/performance/remote_host_build.sh"] +
                languages,
                shortname="remote_host_build.%s" % host,
                environ={"USER_AT_HOST": user_at_host, "CONFIG": "opt"},
                timeout_seconds=build_timeout))
    if build_local:
        # Build locally as well.
        build_jobs.append(
            jobset.JobSpec(
                cmdline=["tools/run_tests/performance/build_performance.sh"] +
                languages,
                shortname="local_build",
                environ={"CONFIG": "opt"},
                timeout_seconds=build_timeout))
    jobset.message("START", "Building.", do_newline=True)
    num_failures, _ = jobset.run(build_jobs,
                                 newline_on_success=True,
                                 maxjobs=10)
    if num_failures == 0:
        jobset.message("SUCCESS", "Built successfully.", do_newline=True)
    else:
        jobset.message("FAILED", "Build failed.", do_newline=True)
        sys.exit(1)
def build_on_remote_hosts(hosts, build_local=False):
    """Builds performance worker on remote hosts."""
    build_timeout = 15 * 60
    build_jobs = []
    for host in hosts:
        user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, host)
        build_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/remote_host_build.sh'],
                shortname='remote_host_build.%s' % host,
                environ={'USER_AT_HOST': user_at_host, 'CONFIG': 'opt'},
                timeout_seconds=build_timeout))
    if build_local:
        # Build locally as well.
        build_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/build_performance.sh'],
                shortname='local_build',
                environ={'CONFIG': 'opt'},
                timeout_seconds=build_timeout))
    jobset.message('START', 'Building on remote hosts.', do_newline=True)
    num_failures, _ = jobset.run(build_jobs,
                                 newline_on_success=True,
                                 maxjobs=10)
    if num_failures == 0:
        jobset.message('SUCCESS',
                       'Build on remote hosts was successful.',
                       do_newline=True)
    else:
        jobset.message('FAILED',
                       'Failed to build on remote hosts.',
                       do_newline=True)
        sys.exit(1)
def build_on_remote_hosts(hosts,
                          languages=scenario_config.LANGUAGES.keys(),
                          build_local=False):
    """Builds performance worker on remote hosts (and maybe also locally)."""
    build_timeout = 15 * 60
    build_jobs = []
    for host in hosts:
        user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, host)
        build_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/remote_host_build.sh'] +
                languages,
                shortname='remote_host_build.%s' % host,
                environ={'USER_AT_HOST': user_at_host, 'CONFIG': 'opt'},
                timeout_seconds=build_timeout))
    if build_local:
        # Build locally as well.
        build_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/build_performance.sh'] +
                languages,
                shortname='local_build',
                environ={'CONFIG': 'opt'},
                timeout_seconds=build_timeout))
    jobset.message('START', 'Building.', do_newline=True)
    num_failures, _ = jobset.run(build_jobs,
                                 newline_on_success=True,
                                 maxjobs=10)
    if num_failures == 0:
        jobset.message('SUCCESS', 'Built successfully.', do_newline=True)
    else:
        jobset.message('FAILED', 'Build failed.', do_newline=True)
        sys.exit(1)
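# Illustrative ordering of the remote-host setup helpers above, matching the
# archive -> prepare -> build sequence they imply. A sketch; the host names
# and language list are hypothetical.
#
#   hosts = ['grpc-perf-1', 'grpc-perf-2']
#   archive_repo(languages=['c++', 'go'])
#   prepare_remote_hosts(hosts, prepare_local=True)
#   build_on_remote_hosts(hosts, languages=['c++', 'go'], build_local=True)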
def pull_images_for_lang(lang, images):
    """Pull all images for given lang from container registry."""
    jobset.message('START',
                   'Downloading images for language "%s"' % lang,
                   do_newline=True)
    download_specs = []
    for release, image in images:
        spec = jobset.JobSpec(cmdline=['gcloud docker -- pull %s' % image],
                              shortname='pull_image_%s' % (image),
                              timeout_seconds=_PULL_IMAGE_TIMEOUT_SECONDS,
                              shell=True)
        download_specs.append(spec)
    num_failures, resultset = jobset.run(download_specs,
                                         newline_on_success=True,
                                         maxjobs=args.jobs)
    if num_failures:
        jobset.message('FAILED',
                       'Failed to download some images',
                       do_newline=True)
        return False
    else:
        jobset.message('SUCCESS',
                       'All images downloaded successfully.',
                       do_newline=True)
        return True
def prepare_remote_hosts(hosts, prepare_local=False):
    """Prepares remote hosts (and maybe prepare localhost as well)."""
    prepare_timeout = 5 * 60
    prepare_jobs = []
    for host in hosts:
        user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, host)
        prepare_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/remote_host_prepare.sh'],
                shortname='remote_host_prepare.%s' % host,
                environ={'USER_AT_HOST': user_at_host},
                timeout_seconds=prepare_timeout))
    if prepare_local:
        # Prepare localhost as well.
        prepare_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/kill_workers.sh'],
                shortname='local_prepare',
                timeout_seconds=prepare_timeout))
    jobset.message('START', 'Preparing hosts.', do_newline=True)
    num_failures, _ = jobset.run(prepare_jobs,
                                 newline_on_success=True,
                                 maxjobs=10)
    if num_failures == 0:
        jobset.message('SUCCESS',
                       'Prepare step completed successfully.',
                       do_newline=True)
    else:
        jobset.message('FAILED',
                       'Failed to prepare remote hosts.',
                       do_newline=True)
        sys.exit(1)
def _pull_images_for_lang(lang, images):
    """Pull all images for given lang from container registry."""
    jobset.message('START',
                   'Downloading images for language "%s"' % lang,
                   do_newline=True)
    download_specs = []
    for release, image in images:
        # Pull the image and warm it up. The first time an image is used with
        # "docker run", it takes time to unpack it, and that delay could later
        # fail our test cases.
        cmdline = [
            'time gcloud docker -- pull %s && time docker run --rm=true %s /bin/true'
            % (image, image)
        ]
        spec = jobset.JobSpec(cmdline=cmdline,
                              shortname='pull_image_%s' % (image),
                              timeout_seconds=_PULL_IMAGE_TIMEOUT_SECONDS,
                              shell=True,
                              flake_retries=2)
        download_specs.append(spec)
    # Too many image downloads at once tend to get stuck.
    max_pull_jobs = min(args.jobs, _MAX_PARALLEL_DOWNLOADS)
    num_failures, resultset = jobset.run(download_specs,
                                         newline_on_success=True,
                                         maxjobs=max_pull_jobs)
    if num_failures:
        jobset.message('FAILED',
                       'Failed to download some images',
                       do_newline=True)
        return False
    else:
        jobset.message('SUCCESS',
                       'All images downloaded successfully.',
                       do_newline=True)
        return True
def maybe_apply_patches_on_git_tag(stack_base, lang, release):
    files_to_patch = []
    for release_info in client_matrix.LANG_RELEASE_MATRIX[lang]:
        if client_matrix.get_release_tag_name(release_info) == release:
            if release_info[release] is not None:
                files_to_patch = release_info[release].get('patch')
                break
    if not files_to_patch:
        return
    patch_file_relative_path = 'patches/%s_%s/git_repo.patch' % (lang, release)
    patch_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), patch_file_relative_path))
    if not os.path.exists(patch_file):
        jobset.message('FAILED',
                       'expected patch file |%s| to exist' % patch_file)
        sys.exit(1)
    subprocess.check_output(['git', 'apply', patch_file],
                            cwd=stack_base,
                            stderr=subprocess.STDOUT)

    # TODO(jtattermusch): this really would need simplification and refactoring
    # - "git add" and "git commit" can easily be done in a single command
    # - it looks like the only reason for the existence of the "files_to_patch"
    #   entry is to perform "git add" - which is clumsy and fragile.
    # - we only allow a single patch with name "git_repo.patch". A better
    #   design would be to allow multiple patches that can have more
    #   descriptive names.
    for repo_relative_path in files_to_patch:
        subprocess.check_output(['git', 'add', repo_relative_path],
                                cwd=stack_base,
                                stderr=subprocess.STDOUT)
    subprocess.check_output(
        ['git', 'commit', '-m',
         ('Hack performed on top of %s git tag in order to build and '
          'run the %s interop tests on that tag.' % (lang, release))],
        cwd=stack_base,
        stderr=subprocess.STDOUT)
def run_collect_perf_profile_jobs(hosts_and_base_names, scenario_name):
    perf_report_jobs = []
    global profile_output_files
    for host_and_port in hosts_and_base_names:
        perf_base_name = hosts_and_base_names[host_and_port]
        output_filename = '%s-%s' % (scenario_name, perf_base_name)
        # From the base filename, create the .svg output filename.
        host = host_and_port.split(':')[0]
        profile_output_files.append('%s.svg' % output_filename)
        perf_report_jobs.append(
            perf_report_processor_job(host, perf_base_name, output_filename))

    jobset.message('START',
                   'Collecting perf reports from qps workers',
                   do_newline=True)
    failures, _ = jobset.run(perf_report_jobs,
                             newline_on_success=True,
                             maxjobs=1)
    jobset.message('END',
                   'Collecting perf reports from qps workers',
                   do_newline=True)
    return failures
def find_test_cases(lang, runtime, release, suite_name):
    """Returns the list of test cases from testcase files per lang/release."""
    file_tmpl = os.path.join(os.path.dirname(__file__), 'testcases/%s__%s')
    testcase_release = release
    filename_prefix = lang
    if lang == 'csharp':
        filename_prefix = runtime
    if not os.path.exists(file_tmpl % (filename_prefix, release)):
        testcase_release = 'master'
    testcases = file_tmpl % (filename_prefix, testcase_release)

    job_spec_list = []
    try:
        with open(testcases) as f:
            # Only lines starting with 'docker run' are test cases.
            for line in f.readlines():
                if line.startswith('docker run'):
                    m = re.search('--test_case=(.*)"', line)
                    shortname = m.group(1) if m else 'unknown_test'
                    m = re.search(
                        '--server_host_override=(.*).sandbox.googleapis.com',
                        line)
                    server = m.group(1) if m else 'unknown_server'
                    spec = jobset.JobSpec(
                        cmdline=line,
                        shortname='%s:%s:%s:%s' % (suite_name, lang, server,
                                                   shortname),
                        timeout_seconds=_TEST_TIMEOUT,
                        shell=True,
                        flake_retries=5 if args.allow_flakes else 0)
                    job_spec_list.append(spec)
            jobset.message('START',
                           'Loaded %s tests from %s' %
                           (len(job_spec_list), testcases),
                           do_newline=True)
    except IOError as err:
        jobset.message('FAILED', err, do_newline=True)
    return job_spec_list
def maybe_apply_patches_on_git_tag(stack_base, lang, release):
    files_to_patch = []
    for release_info in client_matrix.LANG_RELEASE_MATRIX[lang]:
        if client_matrix.get_release_tag_name(release_info) == release:
            if release_info[release] is not None:
                files_to_patch = release_info[release].get('patch')
                break
    if not files_to_patch:
        return
    patch_file_relative_path = 'patches/%s_%s/git_repo.patch' % (lang, release)
    patch_file = os.path.abspath(
        os.path.join(os.path.dirname(__file__), patch_file_relative_path))
    if not os.path.exists(patch_file):
        jobset.message('FAILED',
                       'expected patch file |%s| to exist' % patch_file)
        sys.exit(1)
    subprocess.check_output(['git', 'apply', patch_file],
                            cwd=stack_base,
                            stderr=subprocess.STDOUT)

    # TODO(jtattermusch): this really would need simplification and refactoring
    # - "git add" and "git commit" can easily be done in a single command
    # - it looks like the only reason for the existence of the "files_to_patch"
    #   entry is to perform "git add" - which is clumsy and fragile.
    # - we only allow a single patch with name "git_repo.patch". A better
    #   design would be to allow multiple patches that can have more
    #   descriptive names.
    for repo_relative_path in files_to_patch:
        subprocess.check_output(['git', 'add', repo_relative_path],
                                cwd=stack_base,
                                stderr=subprocess.STDOUT)
    subprocess.check_output(
        ['git', 'commit', '-m',
         ('Hack performed on top of %s git tag in order to build and '
          'run the %s interop tests on that tag.' % (lang, release))],
        cwd=stack_base,
        stderr=subprocess.STDOUT)
def run():
    pid = os.getpid()
    msg = str(pid)
    parser = argparse.ArgumentParser(description='Run Server on PORT')
    parser.add_argument('-P',
                        metavar='P',
                        type=int,
                        nargs='+',
                        help='an integer for gRPC Server port')
    args = parser.parse_args()
    if args and args.P:
        port = args.P[-1]
        jobset.message('START', 'Run hello on port %s' % port, do_newline=True)
    c = get_client()
    start = time.time()
    tt = int(total / cpu_count)
    for i in range(tt):
        r = c.hello(msg)
        assert msg in str(r)
    end = time.time()
    diff = end - start
    qps = total / diff
    jobset.message('SUCCESS',
                   'Done hello total=%s, time diff=%s, qps=%s' %
                   (total, diff, qps),
                   do_newline=True)
def checkout_grpc_stack(lang, release):
    """Invokes 'git checkout' for the lang/release and returns the directory created."""
    assert args.git_checkout and args.git_checkout_root
    if not os.path.exists(args.git_checkout_root):
        os.makedirs(args.git_checkout_root)

    repo = client_matrix.get_github_repo(lang)
    # Get the subdir name part of repo.
    # For example, '[email protected]:grpc/grpc-go.git' should use 'grpc-go'.
    repo_dir = os.path.splitext(os.path.basename(repo))[0]
    stack_base = os.path.join(args.git_checkout_root, repo_dir)

    # Assume the directory is reusable for git checkout.
    if not os.path.exists(stack_base):
        subprocess.check_call(['git', 'clone', '--recursive', repo],
                              cwd=os.path.dirname(stack_base))

    # git checkout.
    jobset.message('START',
                   'git checkout %s from %s' % (release, stack_base),
                   do_newline=True)
    # We should NEVER do checkout on the current tree!
    assert not os.path.dirname(__file__).startswith(stack_base)
    output = subprocess.check_output(['git', 'checkout', release],
                                     cwd=stack_base,
                                     stderr=subprocess.STDOUT)
    commit_log = subprocess.check_output(['git', 'log', '-1'], cwd=stack_base)
    jobset.message('SUCCESS',
                   'git checkout',
                   output + commit_log,
                   do_newline=True)

    # Write git log to commit_log so it can be packaged with the docker image.
    with open(os.path.join(stack_base, 'commit_log'), 'w') as f:
        f.write(commit_log)
    return stack_base
def run_tests_for_lang(lang, runtime, images):
    """Find and run all test cases for a language.

    images is a list of (<release-tag>, <image-full-path>) tuple.
    """
    # Fine to ignore the return value, as a failure to download will result
    # in a test failure later anyway.
    pull_images_for_lang(lang, images)

    total_num_failures = 0
    for release, image in images:
        jobset.message('START', 'Testing %s' % image, do_newline=True)
        suite_name = '%s__%s_%s' % (lang, runtime, release)
        job_spec_list = find_test_cases(lang, runtime, release, suite_name)
        if not job_spec_list:
            jobset.message('FAILED',
                           'No test cases were found.',
                           do_newline=True)
            return 1

        num_failures, resultset = jobset.run(job_spec_list,
                                             newline_on_success=True,
                                             add_env={'docker_image': image},
                                             maxjobs=args.jobs)
        if args.bq_result_table and resultset:
            upload_test_results.upload_interop_results_to_bq(
                resultset, args.bq_result_table, args)
        if num_failures:
            jobset.message('FAILED', 'Some tests failed', do_newline=True)
            total_num_failures += num_failures
        else:
            jobset.message('SUCCESS', 'All tests passed', do_newline=True)

        report_utils.append_junit_xml_results(_xml_report_tree, resultset,
                                              'grpc_interop_matrix',
                                              suite_name, str(uuid.uuid4()))

        if not args.keep:
            cleanup(image)

    return total_num_failures
def run_tests_for_lang(lang, runtime, images):
    """Find and run all test cases for a language.

    images is a list of (<release-tag>, <image-full-path>) tuple.
    """
    total_num_failures = 0
    for image_tuple in images:
        release, image = image_tuple
        jobset.message('START', 'Testing %s' % image, do_newline=True)
        # Download the docker image before running each test case.
        subprocess.check_call(['gcloud', 'docker', '--', 'pull', image])
        suite_name = '%s__%s_%s' % (lang, runtime, release)
        job_spec_list = find_test_cases(lang, runtime, release, suite_name)
        if not job_spec_list:
            jobset.message('FAILED',
                           'No test cases were found.',
                           do_newline=True)
            return 1

        num_failures, resultset = jobset.run(job_spec_list,
                                             newline_on_success=True,
                                             add_env={'docker_image': image},
                                             maxjobs=args.jobs)
        if args.bq_result_table and resultset:
            upload_test_results.upload_interop_results_to_bq(
                resultset, args.bq_result_table, args)
        if num_failures:
            jobset.message('FAILED', 'Some tests failed', do_newline=True)
            total_num_failures += num_failures
        else:
            jobset.message('SUCCESS', 'All tests passed', do_newline=True)

        report_utils.append_junit_xml_results(_xml_report_tree, resultset,
                                              'grpc_interop_matrix',
                                              suite_name, str(uuid.uuid4()))

        if not args.keep:
            cleanup(image)

    return total_num_failures
def run_tests_for_lang(lang, runtime, images):
    """Find and run all test cases for a language.

    images is a list of (<release-tag>, <image-full-path>) tuple.
    """
    total_num_failures = 0
    for image_tuple in images:
        release, image = image_tuple
        jobset.message('START', 'Testing %s' % image, do_newline=True)
        # Download the docker image before running each test case.
        subprocess.check_call(['gcloud', 'docker', '--', 'pull', image])
        suite_name = '%s__%s_%s' % (lang, runtime, release)
        job_spec_list = find_test_cases(lang, runtime, release, suite_name)
        if not job_spec_list:
            jobset.message('FAILED',
                           'No test cases were found.',
                           do_newline=True)
            return 1

        num_failures, resultset = jobset.run(job_spec_list,
                                             newline_on_success=True,
                                             add_env={'docker_image': image},
                                             maxjobs=args.jobs)
        if args.bq_result_table and resultset:
            upload_test_results.upload_interop_results_to_bq(
                resultset, args.bq_result_table)
        if num_failures:
            jobset.message('FAILED', 'Some tests failed', do_newline=True)
            total_num_failures += num_failures
        else:
            jobset.message('SUCCESS', 'All tests passed', do_newline=True)

        report_utils.append_junit_xml_results(_xml_report_tree, resultset,
                                              'grpc_interop_matrix',
                                              suite_name, str(uuid.uuid4()))

        if not args.keep:
            cleanup(image)

    return total_num_failures
def build_all_images_for_release(lang, release):
    """Build all docker images for a release across all runtimes."""
    docker_images = []
    build_jobs = []

    env = {}
    # If we are not using the current tree or its sibling for the grpc stack,
    # do a checkout.
    stack_base = ''
    if args.git_checkout:
        stack_base = checkout_grpc_stack(lang, release)
        var = {
            'go': 'GRPC_GO_ROOT',
            'java': 'GRPC_JAVA_ROOT',
            'node': 'GRPC_NODE_ROOT'
        }.get(lang, 'GRPC_ROOT')
        env[var] = stack_base

    for runtime in client_matrix.LANG_RUNTIME_MATRIX[lang]:
        job = build_image_jobspec(runtime, env, release, stack_base)
        docker_images.append(job.tag)
        build_jobs.append(job)

    jobset.message('START', 'Building interop docker images.', do_newline=True)
    print('Jobs to run: \n%s\n' % '\n'.join(str(j) for j in build_jobs))

    num_failures, _ = jobset.run(build_jobs,
                                 newline_on_success=True,
                                 maxjobs=multiprocessing.cpu_count())
    if num_failures:
        jobset.message('FAILED',
                       'Failed to build interop docker images.',
                       do_newline=True)
        docker_images_cleanup.extend(docker_images)
        sys.exit(1)

    jobset.message('SUCCESS',
                   'All docker images built for %s at %s.' % (lang, release),
                   do_newline=True)

    if release != 'master':
        commit_log = os.path.join(stack_base, 'commit_log')
        if os.path.exists(commit_log):
            for image in docker_images:
                add_files_to_image(image, [commit_log], 'release=%s' % release)
    return docker_images
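# Illustrative top-level build loop over languages (a sketch; the language
# list is hypothetical and `args` must already be parsed):
#
#   for lang in ['go', 'java', 'python']:
#       built_images = build_all_images_for_lang(lang)
#       # Each entry in `built_images` is a full docker tag that can be
#       # pushed to the registry or fed to the interop matrix test runner.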
def archive_repo(languages):
    """Archives local version of repo including submodules."""
    cmdline = ["tar", "-cf", "../grpc.tar", "../grpc/"]
    if "java" in languages:
        cmdline.append("../grpc-java")
    if "go" in languages:
        cmdline.append("../grpc-go")

    archive_job = jobset.JobSpec(cmdline=cmdline,
                                 shortname="archive_repo",
                                 timeout_seconds=3 * 60)

    jobset.message("START", "Archiving local repository.", do_newline=True)
    num_failures, _ = jobset.run([archive_job],
                                 newline_on_success=True,
                                 maxjobs=1)
    if num_failures == 0:
        jobset.message("SUCCESS",
                       "Archive with local repository created successfully.",
                       do_newline=True)
    else:
        jobset.message("FAILED",
                       "Failed to archive local repository.",
                       do_newline=True)
        sys.exit(1)
def archive_repo():
    """Archives local version of repo including submodules."""
    # TODO: also archive grpc-go and grpc-java repos
    archive_job = jobset.JobSpec(
        cmdline=['tar', '-cf', '../grpc.tar', '../grpc/'],
        shortname='archive_repo',
        timeout_seconds=3 * 60)

    jobset.message('START', 'Archiving local repository.', do_newline=True)
    num_failures, _ = jobset.run([archive_job],
                                 newline_on_success=True,
                                 maxjobs=1)
    if num_failures == 0:
        jobset.message('SUCCESS',
                       'Archive with local repository created successfully.',
                       do_newline=True)
    else:
        jobset.message('FAILED',
                       'Failed to archive local repository.',
                       do_newline=True)
        sys.exit(1)
def prepare_remote_hosts(hosts):
    """Prepares remote hosts."""
    prepare_jobs = []
    for host in hosts:
        user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, host)
        prepare_jobs.append(
            jobset.JobSpec(
                cmdline=['tools/run_tests/performance/remote_host_prepare.sh'],
                shortname='remote_host_prepare.%s' % host,
                environ={'USER_AT_HOST': user_at_host},
                timeout_seconds=3 * 60))
    jobset.message('START', 'Preparing remote hosts.', do_newline=True)
    num_failures, _ = jobset.run(prepare_jobs,
                                 newline_on_success=True,
                                 maxjobs=10)
    if num_failures == 0:
        jobset.message('SUCCESS',
                       'Remote hosts ready to start build.',
                       do_newline=True)
    else:
        jobset.message('FAILED',
                       'Failed to prepare remote hosts.',
                       do_newline=True)
        sys.exit(1)
test_cache.maybe_load()

if forever:
    success = True
    while True:
        dw = watch_dirs.DirWatcher(['src', 'include', 'test', 'examples'])
        initial_time = dw.most_recent_change()
        have_files_changed = lambda: dw.most_recent_change() != initial_time
        previous_success = success
        success = _build_and_run(check_cancelled=have_files_changed,
                                 newline_on_success=False,
                                 travis=args.travis,
                                 cache=test_cache) == 0
        if not previous_success and success:
            jobset.message('SUCCESS',
                           'All tests are now passing properly',
                           do_newline=True)
        jobset.message('IDLE', 'No change detected')
        while not have_files_changed():
            time.sleep(1)
else:
    result = _build_and_run(check_cancelled=lambda: False,
                            newline_on_success=args.newline_on_success,
                            travis=args.travis,
                            cache=test_cache,
                            xml_report=args.xml_report)
    if result == 0:
        jobset.message('SUCCESS', 'All tests passed', do_newline=True)
    else:
        jobset.message('FAILED', 'Some tests failed', do_newline=True)
    sys.exit(result)