def generate_fat_jar(output, conflict_severity, args):
    """Generate a fat jar containing the contents of all the jar dependencies.

    Args:
        output: path of the fat jar file to create.
        conflict_severity: name of a console reporting function (e.g.
            'warning' or 'error') used to report duplicate-path conflicts;
            'error' additionally aborts by raising RuntimeError.
        args: paths of the dependency jars to merge into the fat jar.
    """
    target = output
    jars = args
    target_dir = os.path.dirname(output)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
    target_fat_jar = zipfile.ZipFile(target, 'w', zipfile.ZIP_DEFLATED)
    # Record paths written in the fat jar to avoid duplicate writing
    path_jar_dict = {}
    conflicts = []
    for dep_jar in jars:
        jar = zipfile.ZipFile(dep_jar, 'r')
        name_list = jar.namelist()
        for name in name_list:
            # Directory entries are always accepted; file entries are copied
            # only when not excluded from fat jars.
            if name.endswith('/') or not _is_fat_jar_excluded(name):
                if name not in path_jar_dict:
                    # First writer wins: later jars never overwrite a path.
                    target_fat_jar.writestr(name, jar.read(name))
                    path_jar_dict[name] = dep_jar
                else:
                    if name.endswith('/'):
                        # Duplicate directory entries are harmless.
                        continue
                    message = (
                        '%s: Duplicate path %s found in {%s, %s}' %
                        (target, name, os.path.basename(
                            path_jar_dict[name]), os.path.basename(dep_jar)))
                    # Always log all conflicts for diagnosis
                    console.debug(message)
                    if '/.m2/repository/' not in dep_jar:
                        # There are too many conflicts between maven jars,
                        # so we have to ignore them, only count source code conflicts
                        conflicts.append('\n'.join([
                            'Path: %s' % name,
                            'From: %s' % path_jar_dict[name],
                            'Ignored: %s' % dep_jar,
                        ]))
        jar.close()
    if conflicts:
        # Report with the configured severity; 'error' aborts packaging.
        getattr(console, conflict_severity)('%s: Found %d conflicts when packaging.' % (target, len(conflicts)))
        if conflict_severity == 'error':
            raise RuntimeError('fat jar packing conflict')
    generate_fat_jar_metadata(target_fat_jar, jars, conflicts)
    # Write a minimal MANIFEST.MF last, including scm info derived from the
    # first component of the target path (presumably the build dir — confirm).
    contents = [
        'Manifest-Version: 1.0',
        'Created-By: Python.Zipfile (Blade)',
        'Built-By: %s' % os.getenv('USER'),
        'Build-Time: %s' % time.asctime(),
    ]
    contents += _manifest_scm(target.split(os.sep)[0])
    contents.append('\n')
    target_fat_jar.writestr(_JAR_MANIFEST, '\n'.join(contents))
    target_fat_jar.close()
def download_all(self):
    """Download all needed maven artifacts

    Spawns up to `maven_download_concurrency` worker threads that consume
    the `self.__to_download` queue, waits for the queue to drain, and always
    joins the workers before returning. On Ctrl-C the remaining queue items
    are drained so workers can exit.
    """
    concurrency = config.get_item('java_config', 'maven_download_concurrency')
    # No more threads than queued artifacts.
    num_threads = min(self.__to_download.qsize(), concurrency)
    if num_threads == 0:
        return
    console.info('Downloading maven_jars, concurrency=%d ...' % num_threads)
    threads = []
    for i in range(num_threads):
        thread = threading.Thread(target=self._download_worker)
        thread.start()
        threads.append(thread)
    try:
        # Block until every queued item has been marked done by a worker.
        self.__to_download.join()
    except KeyboardInterrupt:
        console.error('KeyboardInterrupt')
        # Drain the queue so worker threads stop picking up new jobs.
        while not self.__to_download.empty():
            try:
                self.__to_download.get_nowait()
            except queue.Empty:
                pass
    finally:
        console.debug('join threads')
        for thread in threads:
            thread.join()
        console.debug('join threads done')
    console.info('Downloading maven_jars done.')
def _generate_java_coverage_report(self):
    """Build and run the ReportGenerator command producing the java coverage report."""
    conf = config.get_section('java_test_config')
    jacoco_home = conf['jacoco_home']
    coverage_reporter = conf['coverage_reporter']
    if not (jacoco_home and coverage_reporter):
        console.warning('Missing jacoco home or coverage report generator '
                        'in global configuration. '
                        'Abort java coverage report generation.')
        return
    jacoco_libs = os.path.join(jacoco_home, 'lib', 'jacocoant.jar')
    report_dir = os.path.join(self.build_dir, 'java', 'coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    coverage_data = self._get_java_coverage_data()
    if not coverage_data:
        return
    java_home = config.get_item('java_config', 'java_home')
    java = os.path.join(java_home, 'bin', 'java') if java_home else 'java'
    # Each coverage data tuple becomes one comma-joined command argument.
    cmd = ['%s -classpath %s:%s com.tencent.gdt.blade.ReportGenerator' % (
        java, coverage_reporter, jacoco_libs)]
    cmd.append(report_dir)
    cmd.extend(','.join(data) for data in coverage_data)
    cmd_str = ' '.join(cmd)
    console.info('Generating java coverage report')
    console.debug(cmd_str)
    if subprocess.call(cmd_str, shell=True) != 0:
        console.warning('Failed to generate java coverage report')
def _check_ccache_install():
    """Check ccache is installed or not."""
    cc, cxx = os.getenv('CC'), os.getenv('CXX')
    # clang scan-build always fail with ccache.
    if (cc and os.path.basename(cc) == 'ccc-analyzer'
            and cxx and os.path.basename(cxx) == 'c++-analyzer'):
        console.debug('Ccache is disabled for scan-build')
        return False
    try:
        proc = subprocess.Popen(
            ['ccache', '-V'],
            env=os.environ,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            universal_newlines=True)
        out, _ = proc.communicate()
        if proc.returncode == 0:
            # Only trust a binary that identifies itself as ccache.
            first_line = out.splitlines(True)[0]
            if first_line and 'ccache version' in first_line:
                console.debug('Ccache found')
                return True
    except OSError:
        # ccache binary not present at all.
        pass
    return False
def git(cmd):
    """Run a shell command and return its stdout, or '' if it failed."""
    proc = subprocess.Popen(cmd, shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode == 0:
        return out
    console.debug('Failed to generate git scm: %s' % err)
    return ''
def verify(self):
    """Verify specific targets after build is complete.

    Checks each built `cc_` target with sources for missing header
    dependency declarations, records details/history/undeclared headers,
    and returns True when no target reported an error.
    """
    # Fix: log messages previously read 'Verifing' (typo).
    console.debug('Verifying header dependency missing...')
    verify_history = self._load_verify_history()
    header_inclusion_history = verify_history['header_inclusion_dependencies']
    error = 0
    verify_details = {}
    undeclared_hdrs = set()
    verify_suppress = config.get_item('cc_config', 'hdr_dep_missing_suppress')
    # Sorting helps reduce jumps between BUILD files when fixing reported problems
    for k in sorted(self.__expanded_command_targets):
        target = self.__build_targets[k]
        if target.type.startswith('cc_') and target.srcs:
            ok, details, target_undeclared_hdrs = target.verify_hdr_dep_missing(
                header_inclusion_history,
                verify_suppress.get(target.key, {}))
            if not ok:
                error += 1
            if details:
                verify_details[target.key] = details
            undeclared_hdrs |= target_undeclared_hdrs
    self._dump_verify_details(verify_details)
    self._dump_verify_history()
    self._dump_undeclared_hdrs(undeclared_hdrs)
    console.debug('Verifying header dependency missing done.')
    return error == 0
def _cleanup_outputs():
    """Best-effort removal of declared output files after an error."""
    console.debug('Cleanup:%s' % _outputs)
    for path in _outputs:
        try:
            os.remove(path)
        except OSError:
            # Already gone or not removable; ignore either way.
            pass
def _run_backend_builder(cmdstr):
    """Run the backend build command; return its exit code (1 on interrupt)."""
    console.debug('Run build command: ' + cmdstr)
    proc = subprocess.Popen(cmdstr, shell=True)
    # pylint: disable=bare-except
    try:
        # wait() returns the process's exit code directly.
        return proc.wait()
    except:  # KeyboardInterrupt
        return 1
def _run_ninja_command(cmdstr):
    """Run "ninja" command without interactive."""
    console.debug('Run build command: ' + cmdstr)
    child = subprocess.Popen(cmdstr, shell=True)
    # pylint: disable=bare-except
    try:
        child.wait()
        return child.returncode
    except:  # KeyboardInterrupt
        return 1
def lazy_init(self, reason):
    """Load the global header declaration file on first use.

    Args:
        reason: human-readable reason for the load; only used in the debug log.
    """
    if self._initialized:
        return
    console.debug("Load global declaration file, " + reason)
    # Fix: use a context manager so the file handle is closed promptly
    # (previously `pickle.load(open(...))` left closing to the GC).
    with open(self._declaration_file, 'rb') as f:
        declaration = pickle.load(f)
    # pylint: disable=attribute-defined-outside-init
    self._hdr_targets_map = declaration['public_hdrs']
    self._hdr_dir_targets_map = declaration['public_incs']
    self._private_hdrs_target_map = declaration['private_hdrs']
    self._allowed_undeclared_hdrs = declaration['allowed_undeclared_hdrs']
    self._initialized = True
def _detect_maven_conflicted_deps(self, scope, dep_jars): """ Maven dependencies might have conflict: same group and artifact but different version. Select higher version by default unless a specific version of maven dependency is specified as a direct dependency of the target """ # pylint: disable=too-many-locals maven_jar_versions = {} # (group, artifact) -> versions maven_jars = {} # (group, artifact, version) -> jars maven_repo = '.m2/repository/' for jar in set(dep_jars): if maven_repo not in jar or not os.path.exists(jar): console.debug('%s: %s not found in local maven repository' % (self.fullname, jar)) continue parts = jar[jar.find(maven_repo) + len(maven_repo):].split('/') if len(parts) < 4: continue version, artifact, group = parts[-2], parts[-3], '.'.join( parts[:-3]) key = group, artifact if key in maven_jar_versions: if version not in maven_jar_versions[key]: maven_jar_versions[key].append(version) else: maven_jar_versions[key] = [version] key = group, artifact, version if key in maven_jars: maven_jars[key].append(jar) else: maven_jars[key] = [jar] maven_dep_ids = self._get_maven_dep_ids() jars = [] for (group, artifact), versions in maven_jar_versions.iteritems(): if len(versions) == 1: picked_version = versions[0] else: picked_version = None for v in versions: maven_id = ':'.join((group, artifact, v)) if maven_id in maven_dep_ids: picked_version = v break if picked_version is None or LooseVersion( v) > LooseVersion(picked_version): picked_version = v console.debug( '%s: Maven dependency version conflict %s:%s:{%s} during %s. Use %s' % (self.fullname, group, artifact, ', '.join(versions), scope, picked_version)) jars += maven_jars[group, artifact, picked_version] return sorted(jars)
def _load_test_history(self):
    """Load incremental test history from disk, tolerating corrupt files."""
    if os.path.exists(_TEST_HISTORY_FILE):
        try:
            with open(_TEST_HISTORY_FILE) as history_file:
                content = history_file.read()
            # pylint: disable=eval-used
            self.test_history = eval(content)
        except (IOError, SyntaxError, NameError, TypeError) as e:
            console.debug('Exception when loading test history: %s' % e)
            console.warning('error loading incremental test history, will run full test')
    # Guarantee the 'items' key exists even after a failed/missing load.
    self.test_history.setdefault('items', {})
def _detect_maven_conflicted_deps(self, scope, dep_jars): """ Maven dependencies might have conflict: same group and artifact but different version. Select higher version by default unless a specific version of maven dependency is specified as a direct dependency of the target """ # pylint: disable=too-many-locals maven_jar_versions = {} # (group, artifact) -> versions maven_jars = {} # (group, artifact, version) -> jars maven_repo = '.m2/repository/' for jar in set(dep_jars): if maven_repo not in jar or not os.path.exists(jar): console.debug('%s: %s not found in local maven repository' % ( self.fullname, jar)) continue parts = jar[jar.find(maven_repo) + len(maven_repo):].split('/') if len(parts) < 4: continue version, artifact, group = parts[-2], parts[-3], '.'.join(parts[:-3]) key = group, artifact if key in maven_jar_versions: if version not in maven_jar_versions[key]: maven_jar_versions[key].append(version) else: maven_jar_versions[key] = [version] key = group, artifact, version if key in maven_jars: maven_jars[key].append(jar) else: maven_jars[key] = [jar] maven_dep_ids = self._get_maven_dep_ids() jars = [] for (group, artifact), versions in maven_jar_versions.iteritems(): if len(versions) == 1: picked_version = versions[0] else: picked_version = None for v in versions: maven_id = ':'.join((group, artifact, v)) if maven_id in maven_dep_ids: picked_version = v break if picked_version is None or LooseVersion(v) > LooseVersion(picked_version): picked_version = v console.debug('%s: Maven dependency version conflict %s:%s:{%s} during %s. Use %s' % ( self.fullname, group, artifact, ', '.join(versions), scope, picked_version)) jars += maven_jars[group, artifact, picked_version] return sorted(jars)
def generate_scm(build_dir):
    """Detect the working copy type and write its scm info to build_dir/scm.json."""
    if os.path.isdir('.git'):
        url, revision = generate_scm_git()
    elif os.path.isdir('.svn'):
        url, revision = generate_scm_svn()
    else:
        console.debug('Unknown scm.')
        return
    scm_path = os.path.join(build_dir, 'scm.json')
    with open(scm_path, 'w') as f:
        json.dump({'revision': revision, 'url': url}, f)
def _detect_maven_conflicted_deps(self, scope, dep_jars): """ Maven dependencies might have conflict: same group and artifact but different version. Select higher version by default unless a specific version of maven dependency is specified as a direct dependency of the target """ # pylint: disable=too-many-locals dep_jars, conflicted_jars = set(dep_jars), set() maven_dep_ids = self._get_maven_dep_ids() maven_jar_dict = {} # (group, artifact) -> (version, set(jar)) maven_repo = '.m2/repository/' for dep_jar in dep_jars: if maven_repo not in dep_jar or not os.path.exists(dep_jar): console.debug('%s: %s not found in local maven repository' % (self.fullname, dep_jar)) continue parts = dep_jar[dep_jar.find(maven_repo) + len(maven_repo):].split('/') if len(parts) < 4: continue name, version, artifact, group = (parts[-1], parts[-2], parts[-3], '.'.join(parts[:-3])) key = (group, artifact) id = ':'.join((group, artifact, version)) if key in maven_jar_dict: old_version, old_jars = maven_jar_dict[key] if version == old_version: # jar name must be different because dep_jars is a set old_jars.add(dep_jar) continue old_id = ':'.join((group, artifact, old_version)) if old_id in maven_dep_ids: conflicted_jars.add(dep_jar) elif id in maven_dep_ids or LooseVersion( version) > LooseVersion(old_version): conflicted_jars |= old_jars maven_jar_dict[key] = (version, set([dep_jar])) else: conflicted_jars.add(dep_jar) value = maven_jar_dict[key] console.debug('%s: Maven dependency version conflict ' '%s:%s:{%s, %s} during %s. Use %s' % (self.fullname, key[0], key[1], version, old_version, scope, value[0])) else: maven_jar_dict[key] = (version, set([dep_jar])) dep_jars -= conflicted_jars return sorted(dep_jars)
def _check_distcc_install(): """Check distcc is installed or not. """ p = subprocess.Popen('distcc --version', env={}, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode == 0: version_line = stdout.splitlines(True)[0] if version_line and version_line.find('distcc') != -1: console.debug('distcc found') return True
def generate(self):
    """Run jacococli to generate coverage report"""
    if not self.__coverage_targets:
        console.debug('No jacoco supported targets')
        return
    report_dir = os.path.join(self.__build_dir, 'jacoco_coverage_report')
    console.info('Generating java coverage report `%s`' % report_dir)
    execfiles = self._collect_execfiles()
    if not execfiles:
        console.warning('jacoco exec files not found')
        return
    jacoco_home = config.get_item('java_test_config', 'jacoco_home')
    if not jacoco_home:
        console.warning('Missing jacoco home in java_test configuration. '
                        'Abort java coverage report generation.')
        return
    self._check_java_debug_options()
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    java_home = config.get_item('java_config', 'java_home')
    java = os.path.join(java_home, 'bin', 'java') if java_home else 'java'
    jacococli = os.path.join(jacoco_home, 'lib', 'jacococli.jar')
    classes_dirs = self._collect_classes()
    source_dirs = self._collect_sources()
    # See https://www.jacoco.org/jacoco/trunk/doc/cli.html
    cmd = [java, '-jar', jacococli, 'report', '--quiet']
    cmd.extend(execfiles)
    cmd.extend(self._cut_in_before_each('--classfiles', classes_dirs))
    cmd.extend(self._cut_in_before_each('--sourcefiles', source_dirs))
    cmd.extend(['--html', report_dir,
                '--csv', report_dir + '.csv',
                '--xml', report_dir + '.xml'])
    console.debug(' '.join(cmd))
    # NOTE: If call with(cmd:str, shell=True), may cause a 'command line too long' error
    # Pass cmd as a list and shell=False solves this problem
    if subprocess.call(cmd, shell=False) != 0:
        console.warning('Failed to generate java coverage report')
        return
    self._postprocess_report(report_dir)
def __init__(self, index, job_queue, job_handler, redirect):
    """Init methods for this thread."""
    super(WorkerThread, self).__init__()
    self.index = index
    self.running = True
    self.job_queue = job_queue
    self.job_handler = job_handler
    self.redirect = redirect
    # Per-job bookkeeping, reset when a new job starts.
    self.job_start_time = 0
    self.job_timeout = 0
    self.job_process = None
    self.job_name = ''
    self.job_is_timeout = False
    self.job_lock = threading.Lock()
    console.debug('Test worker %d starts to work' % self.index)
def __init__(self, blade_root_dir, toolchain, distcc_host_list=None):
    """Probe ccache/distcc availability and record the build environment."""
    self.blade_root_dir = blade_root_dir
    self.__toolchain = toolchain
    # ccache
    self.ccache_installed = self._check_ccache_install()
    # distcc
    self.distcc_installed = self._check_distcc_install()
    self.distcc_host_list = distcc_host_list or os.environ.get('DISTCC_HOSTS', '')
    self.distcc_env_prepared = bool(self.distcc_installed and self.distcc_host_list)
    if self.distcc_env_prepared:
        console.info('Distcc is enabled automatically due DISTCC_HOSTS set')
    distcc_log_file = os.environ.get('DISTCC_LOG', '')
    if distcc_log_file:
        console.debug('Distcc log: %s' % distcc_log_file)
def generate_scm_svn():
    """Parse `svn info` output for the working copy URL and revision."""
    url = revision = 'unknown'
    proc = subprocess.Popen('svn info', shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        console.debug('Failed to generate svn scm: %s' % stderr)
        return url, revision
    for line in stdout.splitlines():
        if line.startswith('URL: '):
            url = line.strip().split()[-1]
        if line.startswith('Revision: '):
            revision = line.strip().split()[-1]
            # Revision appears after URL in `svn info`; stop scanning here.
            break
    return url, revision
def clean(self):
    """Clean specific generated target files or directories"""
    console.info('Cleaning...')
    pending = []
    for key in self.__expanded_command_targets:
        target = self.__build_targets[key]
        clean_list = target.get_clean_list()
        console.debug('Cleaning %s: %s' % (target.fullname, clean_list))
        # Batch removing is much faster than one by one
        pending += clean_list
        if len(pending) > 10000:  # Avoid 'Argument list too long' error.
            self._remove_paths(pending)
            del pending[:]
    if pending:
        self._remove_paths(pending)
    console.info('Cleaning done.')
    return 0
def generate_scm(build_dir):
    """Write svn working copy info (url, revision) to build_dir/scm.json."""
    # TODO(wentingli): Add git scm
    proc = subprocess.Popen('svn info', shell=True,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        console.debug('Failed to generate scm: %s' % stderr)
        return
    revision = url = 'unknown'
    for line in stdout.splitlines():
        if line.startswith('URL: '):
            url = line.strip().split()[-1]
        if line.startswith('Revision: '):
            revision = line.strip().split()[-1]
            break
    scm_path = os.path.join(build_dir, 'scm.json')
    with open(scm_path, 'w') as f:
        json.dump({'revision': revision, 'url': url}, f)
def _wait_worker_threads(self, threads):
    """Wait for worker threads to complete.

    Polls once per second, enforcing the per-job timeout when configured,
    and on Ctrl-C terminates all workers before re-raising.
    """
    test_timeout = config.get_item('global_config', 'test_timeout')
    try:
        while threads:
            time.sleep(1)  # Check every second
            now = time.time()
            dead_threads = []
            for t in threads:
                # Fix: isAlive() was removed in Python 3.9; is_alive() is
                # available on both Python 2.6+ and all Python 3 versions.
                if t.is_alive():
                    if test_timeout is not None:
                        t.check_job_timeout(now)
                else:
                    dead_threads.append(t)
            for dt in dead_threads:
                threads.remove(dt)
    except KeyboardInterrupt:
        console.debug('KeyboardInterrupt: Terminate workers...')
        for t in threads:
            t.terminate()
        raise
def _find_or_generate_target_ninja_file(self, target):
    """Return the per-target ninja file, regenerating only when the rule hash changed."""
    # The `.build.` infix is used to avoid the target ninja file with the
    # same name as the main build.ninja file (when target.name == 'build')
    target_ninja = target._target_file_path('%s.build.ninja' % target.name)
    old_rule_hash = self._read_rule_hash(target_ninja)
    rule_hash = target.rule_hash()
    if rule_hash == old_rule_hash:
        console.debug('Using cached %s' % target_ninja)
        # If the command is "clean", we still need to generate rules to obtain the clean list
        if self.__command == 'clean':
            target.get_rules()
        return target_ninja
    rules = target.get_rules()
    if not rules:
        return None
    console.debug('Generating %s' % target_ninja)
    self._write_target_ninja_file(target, target_ninja, rules, rule_hash)
    return target_ninja
def generate_scm(build_dir):
    """Dump svn url/revision of the working copy into build_dir/scm.json."""
    # TODO(wentingli): Add git scm
    p = subprocess.Popen('svn info', shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    if p.returncode != 0:
        console.debug('Failed to generate scm: %s' % err)
        return
    revision = url = 'unknown'
    for line in out.splitlines():
        if line.startswith('URL: '):
            url = line.strip().split()[-1]
        if line.startswith('Revision: '):
            revision = line.strip().split()[-1]
            break
    with open(os.path.join(build_dir, 'scm.json'), 'w') as f:
        json.dump({'revision': revision, 'url': url}, f)
def _find_or_generate_target_ninja_file(self, target):
    """Return the per-target ninja file, regenerating only when the fingerprint changed."""
    # The `.build.` infix is used to avoid the target ninja file with the
    # same name as the main build.ninja file (when target.name == 'build')
    target_ninja = target._target_file_path('%s.build.ninja' % target.name)
    old_fingerprint = self._read_fingerprint(target_ninja)
    fingerprint = target.fingerprint()
    if fingerprint == old_fingerprint:
        console.debug('Using cached %s' % target_ninja)
        # If the command is "clean", we still need to generate rules to obtain the clean list
        if self.__command == 'clean':
            target.get_build_code()
        return target_ninja
    code = target.get_build_code()
    if not code:
        return None
    console.debug('Generating %s' % target_ninja)
    self._write_target_ninja_file(target, target_ninja, code, fingerprint)
    return target_ninja
def _generate_jacoco_coverage_report(self):
    """Run jacococli to generate coverage report"""
    # TODO(chen3feng): Support generating other formats
    java_test_config = config.get_section('java_test_config')
    jacoco_home = java_test_config['jacoco_home']
    if not jacoco_home:
        console.warning('Missing jacoco home in java_test configuration. '
                        'Abort java coverage report generation.')
        return
    report_dir = os.path.join(self.build_dir, 'jacoco_coverage_report')
    if not os.path.exists(report_dir):
        os.makedirs(report_dir)
    execfiles, classes_dirs, source_dirs = self._get_jacoco_coverage_data()
    if not execfiles:
        return
    java_home = config.get_item('java_config', 'java_home')
    java = os.path.join(java_home, 'bin', 'java') if java_home else 'java'
    jacococli = os.path.join(jacoco_home, 'lib', 'jacococli.jar')
    # See https://www.jacoco.org/jacoco/trunk/doc/cli.html
    cmd_str = ('{java} -jar {jacococli} report {execfiles} {classfiles} {sourcefiles} '
               '--html {report_dir}').format(
                   java=java,
                   jacococli=jacococli,
                   execfiles=' '.join(execfiles),
                   classfiles=' '.join('--classfiles ' + d for d in classes_dirs),
                   sourcefiles=' '.join('--sourcefiles ' + d for d in source_dirs),
                   report_dir=report_dir)
    console.info('Generating java coverage report `%s`' % report_dir)
    console.debug(cmd_str)
    if subprocess.call(cmd_str, shell=True) != 0:
        console.warning('Failed to generate java coverage report')
def debug(self, msg):
    """Emit a debug message prefixed with this target's full name."""
    prefixed = '//%s: %s' % (self.fullname, msg)
    console.debug(prefixed)
def generate_compliation_flags(self): """Generates compliation flags. """ # pylint: disable=too-many-locals toolchain_dir = os.environ.get('TOOLCHAIN_DIR', '') if toolchain_dir and not toolchain_dir.endswith('/'): toolchain_dir += '/' cpp = toolchain_dir + os.environ.get('CPP', 'cpp') cc = toolchain_dir + os.environ.get('CC', 'gcc') cxx = toolchain_dir + os.environ.get('CXX', 'g++') ld = toolchain_dir + os.environ.get('LD', 'g++') console.debug('CPP=%s' % cpp) console.debug('CC=%s' % cc) console.debug('CXX=%s' % cxx) console.debug('LD=%s' % ld) # To modify CC, CXX, LD according to the building environment and # project configuration build_with_distcc = (self.distcc_enabled and self.build_environment.distcc_env_prepared) cc_str = self._append_prefix_to_building_var( prefix='distcc', building_var=cc, condition=build_with_distcc) cxx_str = self._append_prefix_to_building_var( prefix='distcc', building_var=cxx, condition=build_with_distcc) build_with_ccache = self.build_environment.ccache_installed cc_str = self._append_prefix_to_building_var( prefix='ccache', building_var=cc_str, condition=build_with_ccache) cxx_str = self._append_prefix_to_building_var( prefix='ccache', building_var=cxx_str, condition=build_with_ccache) cc_config = config.get_section('cc_config') cc_env_str = ('CC="%s", CXX="%s", SECURECXX="%s %s"' % ( cc_str, cxx_str, cc_config['securecc'], cxx)) ld_env_str = 'LINK="%s"' % ld extra_incs = cc_config['extra_incs'] extra_incs_str = ', '.join(['"%s"' % inc for inc in extra_incs]) if not extra_incs_str: extra_incs_str = '""' (cppflags_except_warning, linkflags) = self.ccflags_manager.get_flags_except_warning() linkflags += cc_config['linkflags'] self._add_rule('top_env.Replace(%s, ' 'CPPPATH=[%s, "%s", "%s"], ' 'CPPFLAGS=%s, CFLAGS=%s, CXXFLAGS=%s, ' '%s, LINKFLAGS=%s)' % (cc_env_str, extra_incs_str, self.build_dir, self.python_inc, cc_config['cppflags'] + cppflags_except_warning, cc_config['cflags'], cc_config['cxxflags'], ld_env_str, linkflags)) 
cc_library_config = config.get_section('cc_library_config') # By default blade use 'ar rcs' and skip ranlib # to generate index for static library arflags = ''.join(cc_library_config['arflags']) self._add_rule('top_env.Replace(ARFLAGS="%s")' % arflags) ranlibflags = cc_library_config['ranlibflags'] if ranlibflags: self._add_rule('top_env.Replace(RANLIBFLAGS="%s")' % ''.join(ranlibflags)) else: self._add_rule('top_env.Replace(RANLIBCOM="", RANLIBCOMSTR="")') # The default ASPPFLAGS of scons is same as ASFLAGS, # this is incorrect for gcc/gas options = self.options if options.m: self._add_rule('top_env.Replace(ASFLAGS=["-g", "--%s"])' % options.m) self._add_rule('top_env.Replace(ASPPFLAGS="-Wa,--%s")' % options.m) self._setup_cache() if build_with_distcc: self.build_environment.setup_distcc_env() for rule in self.build_environment.get_rules(): self._add_rule(rule) self._setup_envs()
def debug(self, msg):
    """Print a debug message prefixed with this target's source location."""
    located = '%s: %s' % (self.source_location, msg)
    console.debug(located, prefix=False)
def debug(self, msg):
    """Log a debug-level message formatted with this target's prefix."""
    formatted = self._format_message('debug', msg)
    console.debug(formatted, prefix=False)