def _check_code_style(targets):
    """Run cpplint over the changed source files of the given targets.

    Returns:
        0 when the check is disabled, nothing changed, or the check ran
        (style warnings only produce a console warning); 1 when the check
        was interrupted by the user.
    """
    cpplint = config.get_item('cc_config', 'cpplint')
    if not cpplint:
        console.info('cpplint disabled')
        return 0
    changed_files = _get_changed_files(targets, _BLADE_ROOT_DIR, _WORKING_DIR)
    if not changed_files:
        return 0
    console.info('Begin to check code style for changed source code')
    p = subprocess.Popen(('%s %s' % (cpplint, ' '.join(changed_files))), shell=True)
    try:
        p.wait()
        if p.returncode != 0:
            if p.returncode == 127:
                # 127 is the shell's "command not found" exit code.
                msg = ("Can't execute '{0}' to check style, you can config the "
                       "'cpplint' option to be a valid cpplint path in the "
                       "'cc_config' section of blade.conf or BLADE_ROOT, or "
                       "make sure '{0}' command is correct.").format(cpplint)
            else:
                # Fixed grammar of the user-facing message ('Please fixing' -> 'Please fix').
                msg = 'Please fix style warnings before submitting the code!'
            console.warning(msg)
    except KeyboardInterrupt as e:  # 'except X as e': Py3-compatible, matches main() below
        console.error(str(e))
        return 1
    return 0  # explicit success code instead of implicit None
def _generate_cc_link_rules(self, ld, linkflags):
    """Emit ninja variables and the link/solink rules for C/C++ targets."""
    global_linkflags = config.get_item('cc_config', 'linkflags')
    self._add_line('linkflags = %s' % ' '.join(global_linkflags))
    self._add_line('intrinsic_linkflags = %s\n' % ' '.join(linkflags))
    link_jobs = config.get_item('link_config', 'link_jobs')
    if not link_jobs:
        pool = None
    else:
        link_jobs = min(link_jobs, self.blade.build_jobs_num())
        console.info('Adjust parallel link jobs number to %s' % link_jobs)
        pool = 'link_pool'
        self._add_line(textwrap.dedent('''\
                pool %s
                  depth = %s''') % (pool, link_jobs))
    link_args = '-o ${out} ${intrinsic_linkflags} ${linkflags} ${target_linkflags} ${in} ${extra_linkflags}'
    self.generate_rule(name='link',
                       command=ld + ' ' + link_args,
                       description='LINK BINARY ${out}',
                       pool=pool)
    self.generate_rule(name='solink',
                       command=ld + ' -shared ' + link_args,
                       description='LINK SHARED ${out}',
                       pool=pool)
def _run_job_redirect(self, job, job_thread):
    """Run one test job, capturing its combined output, and return its exit code."""
    target, run_dir, test_env, cmd = job
    test_name = target.fullname
    use_shell = target.data.get('run_in_shell', False)
    if use_shell:
        cmd = subprocess.list2cmdline(cmd)
    timeout = target.data.get('test_timeout')
    self._show_progress(cmd)
    proc = subprocess.Popen(cmd,
                            env=test_env,
                            cwd=run_dir,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            close_fds=True,
                            shell=use_shell)
    job_thread.set_job_data(proc, test_name, timeout)
    captured = proc.communicate()[0]
    result = self._get_result(proc.returncode)
    msg = 'Output of //%s:\n%s\n//%s finished: %s\n' % (
        test_name, captured, test_name, result)
    # In quiet mode, only failures are surfaced as errors.
    if proc.returncode != 0 and console.verbosity_le('quiet'):
        console.error(msg, prefix=False)
    else:
        console.info(msg)
    console.flush()
    return proc.returncode
def _generate_java_coverage_report(self):
    """Generate a java coverage report from collected jacoco coverage data."""
    java_test_config = config.get_section('java_test_config')
    jacoco_home = java_test_config['jacoco_home']
    coverage_reporter = java_test_config['coverage_reporter']
    if not (jacoco_home and coverage_reporter):
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if not coverage_data:
        return
    cmd = ['java -classpath %s:%s com.tencent.gdt.blade.ReportGenerator' % (
        coverage_reporter, jacoco_libs)]
    cmd.append(report_dir)
    cmd.extend(','.join(data) for data in coverage_data)
    cmd_str = ' '.join(cmd)
    console.info('Generating java coverage report')
    console.info(cmd_str)
    if subprocess.call(cmd_str, shell=True):
        console.warning('Failed to generate java coverage report')
def _show_skipped_tests(self):
    """Print the sorted list of tests that were skipped in this run."""
    if not self.skipped_tests:
        return
    console.info('%d skipped tests:' % len(self.skipped_tests))
    self.skipped_tests.sort()
    for test_key in self.skipped_tests:
        console.info('//%s:%s' % test_key, prefix=False)
def _collect_test_jobs(self):
    """Get incremental test run list.

    Walks all built targets; for each test target, decides whether it must
    run (and why) and records it into self.test_jobs, or classifies it as
    excluded / unchanged / unrepaired.
    """
    for target in self._build_targets.values():
        # Only test targets are considered.
        if not target.type.endswith('_test'):
            continue
        if self._exclude_test(target):
            console.info('//%s is skipped due to --exclude-test' % target.fullname)
            self.excluded_tests.append(target.key)
            continue
        binary_md5, testdata_md5 = self._get_test_target_md5sum(target)
        history = self.test_history['items'].get(target.key)
        reason = self._run_reason(target, history, binary_md5, testdata_md5)
        if reason:
            self.test_jobs[target.key] = TestJob(reason=reason,
                                                 binary_md5=binary_md5,
                                                 testdata_md5=testdata_md5,
                                                 env_md5=self.env_md5,
                                                 args=self.options.args)
        else:
            # No run reason: a history entry exists, so classify by its result.
            if history.result.exit_code == 0:
                self.unchanged_tests.append(target.key)
            else:
                self.unrepaired_tests.append(target.key)
    # Most recently first-failed tests come first.
    self.unrepaired_tests.sort(
        key=lambda x: self.test_history['items'][x].first_fail_time,
        reverse=True)
def _generate_java_coverage_report(self):
    """Generate a java coverage report with the configured jacoco reporter."""
    java_test_config = config.get_section('java_test_config')
    jacoco_home = java_test_config['jacoco_home']
    coverage_reporter = java_test_config['coverage_reporter']
    if not (jacoco_home and coverage_reporter):
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if not coverage_data:
        return
    # Prefer the configured JDK's java binary when java_home is set.
    java = 'java'
    java_home = config.get_item('java_config', 'java_home')
    if java_home:
        java = os.path.join(java_home, 'bin', 'java')
    cmd = ['%s -classpath %s:%s com.tencent.gdt.blade.ReportGenerator'
           % (java, coverage_reporter, jacoco_libs)]
    cmd.append(report_dir)
    cmd.extend(','.join(data) for data in coverage_data)
    cmd_str = ' '.join(cmd)
    console.info('Generating java coverage report')
    console.debug(cmd_str)
    if subprocess.call(cmd_str, shell=True) != 0:
        console.warning('Failed to generate java coverage report')
def schedule_jobs(self):
    """Dispatch test jobs to worker threads; exclusive tests run last, alone."""
    if not self.tests_list:
        return
    num_of_workers = self._get_workers_num()
    console.info('spawn %d worker(s) to run tests' % num_of_workers)
    for job in self.tests_list:
        if job[0].data.get('exclusive'):
            self.exclusive_job_queue.put(job)
        else:
            self.job_queue.put(job)
    quiet = console.verbosity_le('quiet')
    # Interleaved output is unreadable with more than one worker.
    redirect = quiet or num_of_workers > 1
    workers = []
    for worker_id in range(num_of_workers):
        worker = WorkerThread(worker_id, self.job_queue, self._process_job, redirect)
        worker.start()
        workers.append(worker)
    self._wait_worker_threads(workers)
    if not self.exclusive_job_queue.empty():
        console.info('spawn 1 worker to run exclusive tests')
        exclusive_worker = WorkerThread(num_of_workers, self.exclusive_job_queue,
                                        self._process_job, quiet)
        exclusive_worker.start()
        self._wait_worker_threads([exclusive_worker])
def _download_dependency(self, id, classifier, target):
    """Resolve and cache the runtime classpath of one maven artifact.

    Runs 'mvn dependency:build-classpath' against the artifact's pom and
    writes the result to classpath.txt under the artifact's cache dir.
    Returns True on success or when the cached classpath is still fresh,
    False when maven fails.
    """
    group, artifact, version = id.split(':')
    target_path = self._generate_jar_path(id)
    classpath = 'classpath.txt'
    log = 'classpath.log'
    log = os.path.join(target_path, log)
    # Skip the download entirely when the cached classpath is up to date.
    if not self._need_download(os.path.join(target_path, classpath), version, log):
        return True
    # if classifier:
    #     id = '%s:%s' % (id, classifier)
    #     # Currently analyzing dependencies of classifier jar
    #     # usually fails. Here when there is no classpath.txt
    #     # file but classpath.log exists, that means the failure
    #     # of analyzing dependencies last time
    #     if (not os.path.exists(os.path.join(target_path, classpath))
    #             and os.path.exists(log)):
    #         return False
    console.info('Downloading %s dependencies...' % id)
    pom = os.path.join(target_path, artifact + '-' + version + '.pom')
    cmd = ' '.join([self.__maven,
                    'dependency:build-classpath',
                    '-DincludeScope=runtime',
                    '-Dmdep.outputFile=%s' % classpath])
    # '-e -X' makes maven verbose so the log file is useful when resolution fails.
    cmd += ' -e -X -f %s > %s' % (pom, log)
    if subprocess.call(cmd, shell=True) != 0:
        target.warning('Error occurred when resolving %s dependencies. '
                       'Check %s for details.' % (id, log))
        return False
    return True
def _generate_cc_link_rules(self, ld, ldflags):
    """Emit ninja link rules (binary and shared library) for C/C++ targets."""
    ldflags = config.get_item('cc_config', 'linkflags') + ldflags
    link_jobs = config.get_item('link_config', 'link_jobs')
    if not link_jobs:
        pool = None
    else:
        link_jobs = min(link_jobs, self.blade.build_jobs_num())
        console.info('Adjust parallel link jobs number to %s' % link_jobs)
        pool = 'link_pool'
        self._add_rule(textwrap.dedent('''\
                pool %s
                  depth = %s''') % (pool, link_jobs))
    flags_str = ' '.join(ldflags)
    self.generate_rule(
        name='link',
        command='%s -o ${out} %s ${ldflags} ${in} ${extra_ldflags}' % (ld, flags_str),
        description='LINK ${out}',
        pool=pool)
    self.generate_rule(
        name='solink',
        command='%s -o ${out} -shared %s ${ldflags} ${in} ${extra_ldflags}' % (ld, flags_str),
        description='SHAREDLINK ${out}',
        pool=pool)
def _show_tests_summary(self, passed_run_results, failed_run_results):
    """Print the overall testing summary, the trend, and a full-success notice."""
    self._show_banner('Testing Summary')
    console.info('%d tests scheduled to run by scheduler.' % (len(self.test_jobs)))
    if self.skipped_tests:
        console.info('%d tests skipped when doing incremental test.' % len(self.skipped_tests))
        console.info('You can specify --full-test to run all tests.')
    run_tests = len(passed_run_results) + len(failed_run_results)
    total = len(self.test_jobs) + len(self.unrepaired_tests) + len(self.skipped_tests)
    summary = ['Total %d tests' % total]
    if self.skipped_tests:
        summary.append('%d skipped' % len(self.skipped_tests))
    if passed_run_results:
        summary.append('%d passed' % len(passed_run_results))
    if failed_run_results:
        summary.append('%d failed' % len(failed_run_results))
    cancelled_tests = len(self.test_jobs) - run_tests
    if cancelled_tests:
        summary.append('%d cancelled' % cancelled_tests)
    if self.unrepaired_tests:
        summary.append('%d unrepaired' % len(self.unrepaired_tests))
    console.info(', '.join(summary) + '.')
    trend = []
    if self.repaired_tests:
        trend.append('%d repaired' % len(self.repaired_tests))
    if self.new_failed_tests:
        trend.append('%d new failed' % len(self.new_failed_tests))
    if trend:
        console.info('Trend: ' + ', '.join(trend) + '.')
    if self._is_full_success(passed_run_results):
        console.notice('All %d tests passed!' % total)
def parallel_jobs_num(self):
    """Tune the jobs num.

    An explicit user-provided -j value always wins; otherwise the value
    is derived from the distcc host list or the local cpu count.
    """
    # User has the highest priority
    user_jobs_num = self.__options.jobs
    if user_jobs_num > 0:
        return user_jobs_num
    # Calculate job numbers smartly
    distcc_enabled = config.get_item('distcc_config', 'enabled')
    if distcc_enabled and self.build_environment.distcc_env_prepared:
        # Distcc doesn't cost much local cpu, jobs can be quite large.
        distcc_num = len(self.build_environment.get_distcc_hosts_list())
        jobs_num = min(max(int(1.5 * distcc_num), 1), 20)
    else:
        cpu_core_num = cpu_count()
        # machines with cpu_core_num > 4 is usually shared by multiple users,
        # set an upper bound to avoid interfering other users
        jobs_num = min(2 * cpu_core_num, 8)
    # jobs_num >= 1 while user_jobs_num <= 0 here, so the old
    # 'if jobs_num != user_jobs_num' guard was always true; print directly.
    console.info('tunes the parallel jobs number(-j N) to be %d' % (jobs_num))
    return jobs_num
def _generate_cc_link_rules(self, ld, linkflags):
    """Emit ninja variables and link/solink rules (with response files).

    Args:
        ld: the linker executable to invoke.
        linkflags: intrinsic link flags determined by blade itself.
    """
    self._add_line('linkflags = %s' % ' '.join(config.get_item('cc_config', 'linkflags')))
    self._add_line('intrinsic_linkflags = %s\n' % ' '.join(linkflags))
    link_jobs = config.get_item('link_config', 'link_jobs')
    if link_jobs:
        # Cap link concurrency with a ninja pool, never above the build jobs number.
        link_jobs = min(link_jobs, self.blade.build_jobs_num())
        console.info('Adjust parallel link jobs number to %s' % link_jobs)
        pool = 'link_pool'
        self._add_line(textwrap.dedent('''\
                pool %s
                  depth = %s''') % (pool, link_jobs))
    else:
        pool = None
    # Linking might have a lot of object files exceeding maximal length of a bash command line.
    # Using response file can resolve this problem.
    # Refer to: https://ninja-build.org/manual.html
    link_args = '-o ${out} ${intrinsic_linkflags} ${linkflags} ${target_linkflags} @${out}.rsp ${extra_linkflags}'
    self.generate_rule(name='link',
                       command=ld + ' ' + link_args,
                       rspfile='${out}.rsp',
                       rspfile_content='${in}',
                       description='LINK BINARY ${out}',
                       pool=pool)
    self.generate_rule(name='solink',
                       command=ld + ' -shared ' + link_args,
                       rspfile='${out}.rsp',
                       rspfile_content='${in}',
                       description='LINK SHARED ${out}',
                       pool=pool)
def download_all(self):
    """Download all needed maven artifacts"""
    concurrency = config.get_item('java_config', 'maven_download_concurrency')
    num_threads = min(self.__to_download.qsize(), concurrency)
    if not num_threads:
        return
    console.info('Downloading maven_jars, concurrency=%d ...' % num_threads)
    workers = []
    for _ in range(num_threads):
        worker = threading.Thread(target=self._download_worker)
        worker.start()
        workers.append(worker)
    try:
        self.__to_download.join()
    except KeyboardInterrupt:
        console.error('KeyboardInterrupt')
        # Drain the queue so the worker threads can finish.
        while not self.__to_download.empty():
            try:
                self.__to_download.get_nowait()
            except queue.Empty:
                pass
    finally:
        console.debug('join threads')
        for worker in workers:
            worker.join()
        console.debug('join threads done')
    console.info('Downloading maven_jars done.')
def _show_progress(self, cmd):
    """Print the running command with finished/running/total counters."""
    finished = self.num_of_finished_tests
    total = len(self.tests_list)
    console.info('[%s/%s/%s] Running %s' % (
        finished, self.num_of_running_tests, total, cmd))
    if console.verbosity_le('quiet'):
        console.show_progress_bar(finished, total)
def analyze_targets(self):
    """Run dependency analysis over the loaded targets and expand them."""
    console.info('Analyzing dependency graph...')
    build_targets = self.__build_targets
    self.__sorted_targets_keys = analyze_deps(build_targets)
    self.__targets_expanded = True
    console.info('Analyzing done.')
    # Returned for unit tests.
    return build_targets
def generate_build_rules(self):
    """Generate the backend construction rules via the ninja generator."""
    console.info('Generating build rules...')
    rules_generator = NinjaFileGenerator(self.__build_script, self.__blade_path, self)
    build_rules = rules_generator.generate_build_script()
    self.__all_rule_names = rules_generator.get_all_rule_names()
    console.info('Generating done.')
    return build_rules
def generate_build_rules(self):
    """Generate the backend construction rules."""
    console.info('generating build rules...')
    rules_generator = self.new_build_rules_generator()
    build_rules = rules_generator.generate_build_script()
    self.__all_rule_names = rules_generator.get_all_rule_names()
    console.info('generating done.')
    return build_rules
def generate_build_code(self):
    """Generate the backend build code."""
    console.info('Generating backend build code...')
    code_generator = NinjaFileGenerator(self.__build_script, self.__blade_path, self)
    code_generator.generate_build_script()
    self.__all_rule_names = code_generator.get_all_rule_names()
    console.info('Generating done.')
def generate_build_rules(self):
    """Generate the backend construction rules."""
    console.info('Generating build rules...')
    rules_generator = self.new_build_rules_generator()
    build_rules = rules_generator.generate_build_script()
    self.__all_rule_names = rules_generator.get_all_rule_names()
    console.info('Generating done.')
    return build_rules
def _build_jobs_num(self):
    """Decide the number of parallel build jobs."""
    # A user-configured value has the highest priority.
    configured = config.get_item('global_config', 'build_jobs')
    if configured > 0:
        return configured
    adjusted = self.build_accelerator.adjust_jobs_num(cpu_count())
    console.info('Adjust build jobs number(-j N) to be %d' % adjusted)
    return adjusted
def analyze_targets(self):
    """Run dependency analysis over the loaded targets and expand them."""
    console.info('analyzing dependency graph...')
    sorted_keys, depended = analyze_deps(self.__build_targets)
    self.__sorted_targets_keys = sorted_keys
    self.__depended_targets = depended
    self.__targets_expanded = True
    console.info('analyzing done.')
    # Returned for unit tests.
    return self.__build_targets
def try_parse_file(self, filename):
    """Load and execute the configuration file if it exists."""
    self.current_file_name = filename
    try:
        if os.path.exists(filename):
            console.info('loading config file "%s"' % filename)
            execfile(filename, _config_globals, None)
    except SystemExit:
        # The config file called a function that aborts on error.
        console.error_exit('Parse error in config file %s' % filename)
    finally:
        self.current_file_name = ''
def build(options):
    """Run the backend build, verify the outputs, and report the result."""
    console.info('Building...')
    console.flush()
    returncode = _ninja_build(options)
    if returncode == 0 and not build_manager.instance.verify():
        returncode = 1
    if returncode == 0:
        console.info('Build success.')
    else:
        console.error('Build failure.')
    return returncode
def generate_build_code(self):
    """Generate the backend build code."""
    # Make sure maven dependencies are present before generating rules.
    maven.MavenCache.instance(self.__build_dir).download_all()
    console.info('Generating backend build code...')
    code_generator = NinjaFileGenerator(self.__build_script, self.__blade_path, self)
    code_generator.generate_build_script()
    self.__all_rule_names = code_generator.get_all_rule_names()
    console.info('Generating done.')
def try_parse_file(self, filename):
    """Load and execute the configuration file if it exists."""
    self.current_file_name = filename
    try:
        if os.path.exists(filename):
            console.info('Loading config file "%s"' % filename)
            exec_(filename, _config_globals, None)
    except SystemExit:
        # The config file called a function that aborts on error.
        console.fatal('Parse error in config file %s' % filename)
    finally:
        self.current_file_name = ''
def load_targets(self):
    """Load the targets from BUILD files."""
    console.info('Loading BUILD files...')
    direct, expanded, build_targets = load_targets(
        self.__load_targets, self.__root_dir, self)
    self.__direct_targets = direct
    self.__expanded_command_targets = expanded
    self.__build_targets = build_targets
    if self.__command_targets != self.__load_targets:
        # In query dependents mode, we must use command targets to execute query
        self.__expanded_command_targets = self._expand_command_targets()
    console.info('Loading done.')
    # Returned for unit tests.
    return self.__direct_targets, self.__expanded_command_targets
def _has_load_excluded_file(root, files):
    """Whether exclude this root directory when loading BUILD."""
    if root == '.':
        return False
    # A 'BLADE_ROOT' under a subdirectory marks a nested, separate workspace.
    if 'BLADE_ROOT' in files:
        console.info('Skip nested workspace "%s"' % root)
        return True
    # An explicit skip marker file also excludes the directory.
    if _BLADE_SKIP_FILE in files:
        console.info('Skip "%s" due to the "%s" file' % (root, _BLADE_SKIP_FILE))
        return True
    return False
def clean(options):
    """Clean build outputs via the backend builder's clean tool."""
    console.info('Cleaning...(hint: You can specify --generate-dynamic to '
                 'clean shared libraries)')
    backend_builder = config.get_item('global_config', 'backend_builder')
    cmd = [backend_builder]
    # cmd += backend_builder_options(options)
    cmd.append('-f%s' % build_manager.instance.build_script())
    cmd.extend(['-t', 'clean'])
    returncode = _run_backend_builder(subprocess.list2cmdline(cmd))
    console.info('Cleaning done.')
    return returncode
def generate_build_rules(self):
    """Generate the backend construction rules."""
    # Ensure maven artifacts are available before rule generation.
    maven.MavenCache.instance(self.__build_dir).download_all()
    console.info('Generating build rules...')
    rules_generator = NinjaFileGenerator(self.__build_script, self.__blade_path, self)
    build_rules = rules_generator.generate_build_script()
    self.__all_rule_names = rules_generator.get_all_rule_names()
    console.info('Generating done.')
    return build_rules
def load_targets(self):
    """Load the targets from BUILD files."""
    console.info('loading BUILDs...')
    direct, all_command, build_targets = load_targets(
        self.__load_targets, self.__root_dir, self)
    self.__direct_targets = direct
    self.__all_command_targets = all_command
    self.__build_targets = build_targets
    if self.__command_targets != self.__load_targets:
        # In query dependents mode, we must use command targets to execute query
        self.__all_command_targets = self._expand_command_targets()
    console.info('loading done.')
    # Returned for unit tests.
    return self.__direct_targets, self.__all_command_targets
def build(options):
    """Check code style, run the ninja build, and verify the outputs."""
    _check_code_style(_TARGETS)
    console.info('building...')
    console.flush()
    exit_code = _ninja_build(options)
    if exit_code == 0 and not build_manager.instance.verify():
        exit_code = 1
    if exit_code != 0:
        console.error('building failure.')
        return exit_code
    console.info('building done.')
    return 0
def build(options):
    """Check code style, run the ninja build, verify, and report the outcome."""
    _check_code_style(_TARGETS)
    console.info('Building...')
    console.flush()
    exit_code = _ninja_build(options)
    # verify() always runs; a failed verification fails an otherwise clean build.
    if not build_manager.instance.verify() and exit_code == 0:
        exit_code = 1
    if exit_code != 0:
        console.error('Build failure.')
        return exit_code
    console.info('Build success.')
    return 0
def load_targets(self):
    """Load the targets from BUILD files, honoring --exclude-targets."""
    console.info('Loading BUILD files...')
    excluded = target_pattern.normalize_str_list(
        self.__options.exclude_targets, self.__working_dir, ',')
    direct, expanded, build_targets = load_targets(self.__load_targets, excluded, self)
    self.__direct_targets = direct
    self.__expanded_command_targets = expanded
    self.__build_targets = build_targets
    if self.__command_targets != self.__load_targets:
        # In query dependents mode, we must use command targets to execute query
        self.__expanded_command_targets = self._expand_command_targets()
    console.info('Loading done.')
    # Returned for unit tests.
    return self.__direct_targets, self.__expanded_command_targets
def clean(options):
    """Clean build outputs with the configured native builder."""
    console.info('cleaning...(hint: please specify --generate-dynamic to '
                 'clean your so)')
    native_builder = config.get_item('global_config', 'native_builder')
    cmd = [native_builder]
    # cmd += native_builder_options(options)
    if native_builder == 'ninja':
        cmd.extend(['-t', 'clean'])
    else:
        cmd.extend(['--duplicate=soft-copy', '-c', '-s', '--cache-show'])
    returncode = _run_native_builder(subprocess.list2cmdline(cmd))
    console.info('cleaning done.')
    return returncode
def _show_tests_result(self, passed_run_results, failed_run_results):
    """Show test details and summary according to the options."""
    if self.options.show_details:
        self._show_banner('Testing Details')
        self._show_skipped_tests()
        if passed_run_results:
            console.info('passed tests:')
            self._show_run_results(passed_run_results)
    if self.options.show_tests_slower_than is not None:
        self._show_slow_tests(passed_run_results, failed_run_results)
    if failed_run_results:
        # Always show details of failed tests
        console.error('failed tests:')
        self._show_run_results(failed_run_results, is_error=True)
    self._show_tests_summary(passed_run_results, failed_run_results)
def build(options):
    """Check code style, build with the configured native builder, then verify."""
    _check_code_style(_TARGETS)
    console.info('building...')
    console.flush()
    if config.get_item('global_config', 'native_builder') == 'ninja':
        exit_code = _ninja_build(options)
    else:
        exit_code = _scons_build(options)
    if exit_code == 0 and not build_manager.instance.verify():
        exit_code = 1
    if exit_code != 0:
        console.error('building failure.')
        return exit_code
    console.info('building done.')
    return 0
def run_target(self, target_name):
    """Run one single runnable target and return its exit code.

    Aborts via console.error_exit when the target's type is not runnable.
    """
    target_key = tuple(target_name.split(':'))
    target = self.targets[target_key]
    if target.type not in self.run_list:
        console.error_exit('target %s:%s is not a target that could run' % (
            target_key[0], target_key[1]))
    run_env = self._prepare_env(target)
    cmd = [os.path.abspath(self._executable(target))] + self.options.args
    shell = target.data.get('run_in_shell', False)
    if shell:
        # The shell expects a single command-line string, not an argv list.
        cmd = subprocess.list2cmdline(cmd)
    # Fixed grammar of the user-facing message ('will be ran' -> 'will be run').
    console.info("'%s' will be run" % cmd)
    sys.stdout.flush()
    p = subprocess.Popen(cmd, env=run_env, close_fds=True, shell=shell)
    p.wait()
    self._clean_env()
    return p.returncode
def parallel_jobs_num(self):
    """Tune the jobs num."""
    # An explicit -j value from the user always wins.
    user_jobs_num = self.__options.jobs
    if user_jobs_num > 0:
        return user_jobs_num
    # Otherwise derive the value from the environment.
    distcc_enabled = config.get_item('distcc_config', 'enabled')
    if distcc_enabled and self.build_environment.distcc_env_prepared:
        # Distcc doesn't cost much local cpu, jobs can be quite large.
        host_count = len(self.build_environment.get_distcc_hosts_list())
        jobs_num = min(max(int(1.5 * host_count), 1), 20)
    else:
        # Machines with many cores are usually shared by multiple users,
        # so cap the job count to avoid interfering with other users.
        jobs_num = min(2 * cpu_count(), 8)
    console.info('tunes the parallel jobs number(-j N) to be %d' % jobs_num)
    return jobs_num
def main(blade_path):
    """Entry point: run _main, report elapsed time, and exit with its status."""
    exit_code = 0
    try:
        start_time = time.time()
        exit_code = _main(blade_path)
        cost_time = int(time.time() - start_time)
        if cost_time > 1:
            # Only report durations worth noticing.
            console.info('cost time %s' % format_timedelta(cost_time))
    except SystemExit as e:
        # pylint misreport e.code as classobj
        exit_code = e.code  # pylint: disable=redefined-variable-type
    except KeyboardInterrupt:
        console.error('keyboard interrupted', -signal.SIGINT)
        exit_code = -signal.SIGINT
    except:  # pylint: disable=bare-except
        # Top-level boundary: catch everything else and report the traceback.
        exit_code = 1
        console.error(traceback.format_exc())
    if exit_code != 0:
        console.error('failure')
    sys.exit(exit_code)
def query(self):
    """Query the targets and write the result to a file or stdout."""
    output_file_name = self.__options.output_file
    if output_file_name:
        output_file_name = os.path.join(self.__working_dir, output_file_name)
        output_file = open(output_file_name, 'w')
        console.info('query result will be written to file "%s"' % self.__options.output_file)
    else:
        output_file = sys.stdout
        console.info('query result:')
    # Dispatch on the requested output format; plain text is the default.
    format_handlers = {
        'dot': self.query_dependency_dot,
        'tree': self.query_dependency_tree,
    }
    handler = format_handlers.get(self.__options.output_format,
                                  self.query_dependency_plain)
    handler(output_file)
    if output_file_name:
        output_file.close()
    return 0
def _show_tests_summary(self, passed_run_results, failed_run_results):
    """Print the summary for the finished test run."""
    self._show_banner('Testing Summary')
    console.info('%d tests scheduled to run by scheduler.' % (len(self.test_jobs)))
    if self.skipped_tests:
        console.info('%d tests skipped when doing incremental test.' % len(self.skipped_tests))
        console.info('You can specify --full-test to run all tests.')
    passed = len(passed_run_results)
    failed = len(failed_run_results)
    scheduled = len(self.test_jobs)
    run_tests = passed + failed
    if passed == scheduled:
        console.notice('All %d tests passed!' % passed)
        return
    summary = ['total %d tests' % scheduled]
    if passed_run_results:
        summary.append('%d passed' % passed)
    if failed_run_results:
        summary.append('%d failed' % failed)
    cancelled_tests = scheduled - run_tests
    if cancelled_tests:
        summary.append('%d cancelled' % cancelled_tests)
    console.error(', '.join(summary) + '.')
def _run_reason(self, target, binary_md5, testdata_md5):
    """Return run reason for a given test, or None if it can be skipped.

    Checks are ordered by priority: explicit skip and forced-run conditions
    first, then history-based change detection.
    """
    if self._skip_test(target):
        console.info('%s is skipped by --skip-test' % target.fullname)
        return None
    if self.options.full_test:
        return 'FULL_TEST'
    if target.data.get('always_run'):
        return 'ALWAYS_RUN'
    if target.key in self.direct_targets:
        return 'EXPLICIT'
    history = self.test_history['items'].get(target.key)
    if not history:
        return 'NO_HISTORY'
    if history.result.exit_code != 0:
        return 'LAST_FAILED'
    last_time = history.result.start_time
    interval = time.time() - last_time
    # A negative interval means the clock went backwards; treat it as stale too.
    if interval >= _TEST_EXPIRE_TIME or interval < 0:
        return 'STALE'
    if history.job.binary_md5 != binary_md5:
        return 'BINARY'
    if history.job.testdata_md5 != testdata_md5:
        return 'TESTDATA'
    if history.job.env_md5 != self.env_md5:
        return 'ENVIRONMENT'
    if history.job.args != self.options.args:
        return 'ARGUMENT'
    return None
def _check_code_style(targets):
    """Run cpplint over the changed source files of the given targets.

    Returns:
        0 when the check is disabled, nothing changed, or the check ran
        (style warnings only produce a console warning); 1 when the check
        was interrupted by the user.
    """
    cpplint = config.get_item('cc_config', 'cpplint')
    if not cpplint:
        console.info('cpplint disabled')
        return 0
    changed_files = _get_changed_files(targets, _BLADE_ROOT_DIR, _WORKING_DIR)
    if not changed_files:
        return 0
    console.info('Begin to check code style for changed source code')
    p = subprocess.Popen(('%s %s' % (cpplint, ' '.join(changed_files))), shell=True)
    try:
        p.wait()
        if p.returncode != 0:
            if p.returncode == 127:
                # 127 is the shell's "command not found" exit code.
                msg = ("Can't execute '{0}' to check style, you can config the "
                       "'cpplint' option to be a valid cpplint path in the "
                       "'cc_config' section of blade.conf or BLADE_ROOT, or "
                       "make sure '{0}' command is correct.").format(cpplint)
            else:
                # Fixed grammar of the user-facing message ('Please fixing' -> 'Please fix').
                msg = 'Please fix style warnings before submitting the code!'
            console.warning(msg)
    except KeyboardInterrupt as e:  # 'except X as e': Py3-compatible, matches main() above
        console.error(str(e))
        return 1
    return 0  # explicit success code instead of implicit None