def _get_apt_related_files(self, filter_tags=None):
    path = self._get_apt_related_files_cache_path()
    if os.path.exists(path):
        return load_json_cache(path)
    else:
        info_path = os.path.join(self._cache_dir, 'freeline_annotation_info.json')
        if os.path.exists(info_path):
            info_cache = load_json_cache(info_path)
            related_files = []
            for anno, files in info_cache.iteritems():
                if filter_tags and anno not in filter_tags:
                    self.debug('ignore annotation: {}'.format(anno))
                    continue
                for info in files:
                    if info['module'] == self._name or info['module'] in self._module_info['local_module_dep']:
                        if 'java_path' in info and info['java_path']:
                            related_files.append(info['java_path'])
            write_json_cache(self._get_apt_related_files_cache_path(), related_files)
            return related_files
        return []
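# The cache helpers used throughout this file (load_json_cache / write_json_cache)
# are not defined in this section; a minimal sketch, assuming they are thin
# wrappers around the standard json module:
import json

def load_json_cache(path):
    # read a JSON cache file and return the parsed object
    with open(path, 'r') as fp:
        return json.load(fp)

def write_json_cache(path, obj):
    # serialize obj to the cache file, overwriting any previous content
    with open(path, 'w') as fp:
        json.dump(obj, fp)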
def get_local_resources_dependencies(res_type, config, module, project_info):
    res_dep_path = os.path.join(config['build_cache_dir'], module['name'],
                                '{}_dependencies.json'.format(res_type))
    res_dependencies = {'library_resources': [], 'local_resources': []}
    if os.path.exists(res_dep_path):
        res_dependencies = load_json_cache(res_dep_path)
    local_dep_res_path = []
    if 'module_dependencies' not in config:
        local_dep_res_path = res_dependencies['local_resources']
    else:
        local_res_deps = []
        local_res_deps.extend(res_dependencies['local_resources'])
        deps = project_info[module['name']]['local_module_dep']
        deps = find_all_dependent_modules(deps, deps, config)
        for m in deps:
            deppath = os.path.join(config['build_cache_dir'], m, '{}_dependencies.json'.format(res_type))
            if os.path.exists(deppath):
                dep = load_json_cache(deppath)
                if 'local_resources' in dep:
                    local_res_deps.extend(dep['local_resources'])
        local_dep_res_path = list(set(local_res_deps))
    return res_dependencies['library_resources'], local_dep_res_path
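# find_all_dependent_modules is called above but not shown in this section; a
# sketch of the transitive closure it presumably computes, assuming
# config['module_dependencies'] maps each module name to its direct dependencies:
def find_all_dependent_modules(pending, collected, config):
    dep_map = config.get('module_dependencies', {})
    for name in list(pending):
        for child in dep_map.get(name, []):
            if child not in collected:
                collected.append(child)
                # recurse so indirect dependencies are picked up as well
                find_all_dependent_modules([child], collected, config)
    return collected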
def execute(self):
    cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    if not os.path.exists(cache_path):
        raise FileMissedException('{} not found.'.format(cache_path), ' re-run clean build.')
    self._stat_cache = load_json_cache(cache_path)
    project_info_cache_path = os.path.join(self._config['build_cache_dir'], 'project_info_cache.json')
    if os.path.exists(project_info_cache_path):
        self.project_info = load_json_cache(project_info_cache_path)
    else:
        self.project_info = get_project_info(self._config)
        write_json_cache(project_info_cache_path, self.project_info)
    build_info = self._get_build_info()
    for module_name, module_info in self.project_info.iteritems():
        if module_name in self._stat_cache:
            self._changed_files[module_name] = {'libs': [], 'assets': [], 'res': [], 'src': [],
                                                'manifest': [], 'config': [], 'so': [], 'cpp': []}
            self._scan_module_changes(module_name, module_info['path'])
    self._mark_changed_flag()
    return {'projects': self._changed_files, 'build_info': build_info}
def execute(self):
    cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    stat_cache = load_json_cache(cache_path)
    cache_path_md5 = os.path.join(self._config['build_cache_dir'], 'stat_cache_md5.json')
    stat_cache_md5 = load_json_cache(cache_path_md5)
    for module, file_dict in self._changed_files.iteritems():
        for key, files in file_dict.iteritems():
            if key != 'apt':
                for fpath in files:
                    if not fpath.startswith(self._config['build_cache_dir']) and os.path.exists(fpath):
                        self.debug('refresh {} stat'.format(fpath))
                        os.utime(fpath, None)
                        if fpath not in stat_cache[module]:
                            stat_cache[module][fpath] = {}
                        if fpath in stat_cache_md5:
                            stat_cache_md5[fpath] = get_md5(fpath)
                        stat_cache[module][fpath]['mtime'] = os.path.getmtime(fpath)
                        stat_cache[module][fpath]['size'] = os.path.getsize(fpath)
    write_json_cache(cache_path, stat_cache)
    write_json_cache(cache_path_md5, stat_cache_md5)
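# get_md5 is referenced above and in run_apt_only but not defined in this
# section; a minimal sketch, assuming it hashes file contents with hashlib:
import hashlib

def get_md5(fpath):
    # stream the file in chunks so large files do not load fully into memory
    md5 = hashlib.md5()
    with open(fpath, 'rb') as fp:
        for chunk in iter(lambda: fp.read(8192), b''):
            md5.update(chunk)
    return md5.hexdigest()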
def execute(self):
    cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    if not os.path.exists(cache_path):
        raise FileMissedException('{} not found.'.format(cache_path), ' re-run clean build.')
    self._stat_cache = load_json_cache(cache_path)
    project_info_cache_path = os.path.join(self._config['build_cache_dir'], 'project_info_cache.json')
    if os.path.exists(project_info_cache_path):
        self.project_info = load_json_cache(project_info_cache_path)
    else:
        self.project_info = get_project_info(self._config)
        write_json_cache(project_info_cache_path, self.project_info)
    build_info = self._get_build_info()
    for module_name, module_info in self.project_info.iteritems():
        self._changed_files[module_name] = {'libs': [], 'assets': [], 'res': [],
                                            'src': [], 'manifest': [], 'config': []}
        self._scan_module_changes(module_name, module_info['path'])
    self._mark_changed_flag()
    return {'projects': self._changed_files, 'build_info': build_info}
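# _scan_module_changes is not shown in this section; a sketch of the
# mtime/size comparison it presumably performs against the stat cache (the
# key names mirror the ones written by the stat-refresh execute() above, and
# 'src' is used only as a placeholder bucket):
def scan_module_changes_sketch(module_name, module_path, stat_cache, changed_files):
    for dirpath, dirnames, files in os.walk(module_path):
        for fn in files:
            fpath = os.path.join(dirpath, fn)
            stat = stat_cache.get(module_name, {}).get(fpath)
            if stat is None \
                    or os.path.getmtime(fpath) > stat['mtime'] \
                    or os.path.getsize(fpath) != stat['size']:
                # the real implementation classifies the change by
                # extension/location (res, assets, manifest, ...)
                changed_files[module_name]['src'].append(fpath)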
def get_project_info(config):
    Logger.debug("collecting project info, please wait a while...")
    project_info = {}
    if 'modules' in config:
        modules = config['modules']
    else:
        modules = get_all_modules(os.getcwd())
    jar_dependencies_path = os.path.join(config['build_cache_dir'], 'jar_dependencies.json')
    jar_dependencies = []
    if os.path.exists(jar_dependencies_path):
        jar_dependencies = load_json_cache(jar_dependencies_path)
    for module in modules:
        if module['name'] in config['project_source_sets']:
            module_info = {}
            module_info['name'] = module['name']
            module_info['path'] = module['path']
            module_info['relative_dir'] = module['path']
            module_info['dep_jar_path'] = jar_dependencies
            module_info['packagename'] = get_package_name(
                config['project_source_sets'][module['name']]['main_manifest_path'])
            gradle_content = remove_comments(get_file_content(os.path.join(module['path'], 'build.gradle')))
            module_info['local_module_dep'] = get_local_dependency(gradle_content)
            project_info[module['name']] = module_info
    for module in modules:
        if module['name'] in config['project_source_sets']:
            local_deps = project_info[module['name']]['local_module_dep']
            # note: local_deps aliases the list being iterated, so entries
            # appended here are visited too, pulling in indirect dependencies
            for dep in project_info[module['name']]['local_module_dep']:
                if dep in project_info:
                    local_deps.extend(project_info[dep]['local_module_dep'])
            local_deps = list(set(local_deps))
            project_info[module['name']]['local_module_dep'] = []
            for item in local_deps:
                local_dep_name = get_module_name(item)
                if local_dep_name in project_info:
                    project_info[module['name']]['local_module_dep'].append(local_dep_name)
            res_dependencies_path = os.path.join(config['build_cache_dir'], module['name'],
                                                 'resources_dependencies.json')
            res_dependencies = {'library_resources': [], 'local_resources': []}
            if os.path.exists(res_dependencies_path):
                res_dependencies = load_json_cache(res_dependencies_path)
            project_info[module['name']]['dep_res_path'] = res_dependencies['library_resources']
            project_info[module['name']]['local_dep_res_path'] = res_dependencies['local_resources']
    return project_info
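# Shape of the structure returned by get_project_info, illustrated for a
# hypothetical module named 'app' (field values are examples, not real output):
example_project_info = {
    'app': {
        'name': 'app',
        'path': 'app',
        'relative_dir': 'app',
        'dep_jar_path': ['build-cache/jars/support-v4.jar'],
        'packagename': 'com.example.app',
        'local_module_dep': ['common'],
        'dep_res_path': [],
        'local_dep_res_path': [],
    },
}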
def execute(self):
    if self._is_append:
        # reload the config while in append mode
        self.debug('generate_file_stat_task in append mode')
        from dispatcher import read_freeline_config
        self._config = read_freeline_config()
        self._stat_cache = load_json_cache(self._cache_path)
    if 'modules' in self._config:
        all_modules = self._config['modules']
    else:
        all_modules = get_all_modules(os.getcwd())
    if self._is_append and os.path.exists(self._cache_path):
        old_modules = self._stat_cache.keys()
        match_arr = [m['name'] for m in all_modules]
        match_map = {m['name']: m for m in all_modules}
        new_modules = []
        for m in match_arr:
            if m not in old_modules:
                self.debug('find new module: {}'.format(m))
                new_modules.append(match_map[m])
        if len(new_modules) > 0:
            self._fill_cache_map(new_modules)
            self._save_cache()
        else:
            self.debug('no new modules found.')
    else:
        self._fill_cache_map(all_modules)
        self._save_cache()
def _get_backup_res_changed_list(self):
    respack_dir = self._finder.get_dst_res_pack_path(self._name)
    cache = load_json_cache(os.path.join(respack_dir, 'rchangelist.bak'))
    changed_list = cache.get('changed_list')
    if not changed_list:
        changed_list = []
    return changed_list
def run_apt_only(self):
    if self._is_databinding_enabled and self._should_run_databinding_apt():
        apt_args = self._generate_java_compile_args(extra_javac_args_enabled=True)
        self.debug('apt exec: ' + ' '.join(apt_args))
        output, err, code = cexec(apt_args, callback=None)
        if code != 0:
            raise FreelineException('apt compile failed.', '{}\n{}'.format(output, err))
        if self._apt_output_dir and os.path.exists(self._apt_output_dir):
            apt_cache_path = os.path.join(self._config['build_cache_dir'], 'apt_files_stat_cache.json')
            apt_cache = None  # guard against a NameError when the cache file is missing
            if os.path.exists(apt_cache_path):
                apt_cache = load_json_cache(apt_cache_path)
            for dirpath, dirnames, files in os.walk(self._apt_output_dir):
                for fn in files:
                    fpath = os.path.join(dirpath, fn)
                    if apt_cache and self._name in apt_cache:
                        if fpath in apt_cache[self._name]:
                            new_md5 = get_md5(fpath)
                            if new_md5 != apt_cache[self._name][fpath]['md5']:
                                self.debug('detect new md5 value, add apt file to change list: {}'.format(fpath))
                                self._changed_files['src'].append(fpath)
                        else:
                            self.debug('find new apt file, add to change list: {}'.format(fpath))
                            self._changed_files['src'].append(fpath)
                    else:
                        self.debug('apt cache not found, add to change list: {}'.format(fpath))
                        self._changed_files['src'].append(fpath)
def check_build_environment(self):
    CleanBuilder.check_build_environment(self)
    if self._project_info is None:
        project_info_cache_path = os.path.join(self._config['build_cache_dir'], 'project_info_cache.json')
        if os.path.exists(project_info_cache_path):
            self._project_info = load_json_cache(project_info_cache_path)
        else:
            self._project_info = get_project_info(self._config)
def read_freeline_config(config_path=None):
    if not config_path:
        config_path = os.path.join(get_cache_dir(), 'project_description.json')
    if os.path.isfile(config_path):
        config = load_json_cache(config_path)
        return config
    raise NoConfigFoundException(config_path)
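# Usage sketch: callers such as the append-mode execute() above reload the
# project description this way (the variable names below are illustrative):
config = read_freeline_config()
build_cache_dir = config['build_cache_dir']  # the key every task in this file reads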
def _get_apt_related_files(self):
    path = self._get_apt_related_files_cache_path()
    if os.path.exists(path):
        return load_json_cache(path)
    else:
        info_path = os.path.join(self._cache_dir, 'freeline_annotation_info.json')
        if os.path.exists(info_path):
            info_cache = load_json_cache(info_path)
            related_files = []
            for anno, files in info_cache.iteritems():
                for info in files:
                    if 'java_path' in info and info['java_path']:
                        related_files.append(info['java_path'])
            write_json_cache(self._get_apt_related_files_cache_path(), related_files)
            return related_files
        return []
def __init__(self, module_name, path, config, changed_files, module_info, is_art,
             all_module_info=None, module_dir_map=None, is_any_modules_have_res_changed=False,
             changed_modules=None):
    android_tools.AndroidIncBuildInvoker.__init__(self, module_name, path, config, changed_files,
                                                  module_info, is_art=is_art)
    self._all_module_info = all_module_info
    self._module_dir_map = module_dir_map
    self._is_any_modules_have_res_changed = is_any_modules_have_res_changed
    self._changed_modules = changed_modules
    self._merged_res_paths = []
    self._merged_res_paths.append(self._finder.get_backup_res_dir())
    self._replace_mapper = {}
    self._is_retrolambda_enabled = 'retrolambda' in self._config \
        and self._name in self._config['retrolambda'] \
        and self._config['retrolambda'][self._name]['enabled']
    self._is_databinding_enabled = 'databinding_modules' in self._config \
        and self._name in self._config['databinding_modules']
    self._is_dagger_enabled = 'apt_libraries' in self._config \
        and self._config['apt_libraries']['dagger']
    self._apt_output_dir = None
    self._force_annotation_processor_files = load_json_cache(
        os.path.join(self._config['build_cache_dir'], 'freeline-force-apt', module_name,
                     'force_annotation_processor_files.json'))
    for mname in self._all_module_info.keys():
        if mname in self._config['project_source_sets']:
            self._merged_res_paths.extend(
                self._config['project_source_sets'][mname]['main_res_directory'])
            self._merged_res_paths.extend(
                self._config['project_source_sets'][mname]['main_assets_directory'])
def read_freeline_config(config_path=None):
    if not config_path:
        config_path = os.path.join(os.getcwd(), 'freeline_project_description.json')
    if os.path.isfile(config_path):
        config = load_json_cache(config_path)
        return config
    print("#############################################")
    print("#                   ERROR                   #")
    print("#############################################")
    print("# Project description file not found: ")
    print("# -> {}".format(config_path))
    print("#")
    print("# To solve this error, please execute the command below:")
    print("#")
    print("# - Windows[CMD]: gradlew checkBeforeCleanBuild")
    print("# - Linux/Mac:    ./gradlew checkBeforeCleanBuild")
    print("#")
    print("# Then, this problem will be solved.")
    print("#")
    print("#############################################")
    exit()
def __update_class_related(self):
    # update class related
    changed_java_files = []
    for module, file_dict in self._changed_files['projects'].iteritems():
        if len(file_dict['src']) > 0:
            changed_java_files.extend(file_dict['src'])
    # process changed java files
    if len(changed_java_files) > 0:
        # update stat_cache.json
        cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
        changefiles = ';'.join(changed_java_files)
        class_related_args = ['java', '-jar',
                              os.path.join('freeline', 'release-tools', 'classrelated.jar'),
                              cache_path, changefiles]
        self.debug('update class related: ' + ' '.join(class_related_args))
        show_gradle_log = False
        if ArgsConfig.args is not None and ('gradlelog' in ArgsConfig.args and ArgsConfig.args.gradlelog):
            show_gradle_log = True
        output, err, code = cexec(class_related_args, callback=None, use_stdout=show_gradle_log)
        # read from stat_cache.json
        stat_cache = load_json_cache(cache_path)
        # ignore files (note: currently unused)
        ignore_java_files = ['UCR.java', 'UCContentProvider.java']
        related_files = []
        package_map = {}
        # read all package java files
        for module, file_dict in stat_cache.items():
            for file in file_dict.keys():
                package_name = self.__get_package(file)
                if package_name == '':
                    continue
                if package_name not in package_map:
                    same_package_files = []
                    package_map[package_name] = same_package_files
                else:
                    same_package_files = package_map.get(package_name)
                same_package_files.append(file)
        # read all related java files
        for file in changed_java_files:
            for module, file_dict in stat_cache.items():
                if file in file_dict:
                    file_stat = file_dict[file]
                    if 'related' in file_stat:
                        related_files.extend(file_stat['related'])
            # read all same package files
            package_name = self.__get_package(file)
            if package_name != '' and package_name in package_map:
                same_package_files = package_map[package_name]
                related_files.extend(same_package_files)
        related_files = list(set(related_files))
        if len(related_files) > 0:
            # update self._changed_files['projects'] module's file_dict['src']
            for module, file_dict in stat_cache.items():
                for file in related_files:
                    if file in file_dict:
                        self._changed_files['projects'][module]['src'].append(file)
            self.debug('updated file changed list:')
            self.debug(self._changed_files)
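# __get_package is not shown in this section; a minimal sketch, assuming it
# extracts the package declaration from a java source file:
import re

def get_package_sketch(java_path):
    # returns 'com.example.foo' for a file declaring 'package com.example.foo;',
    # or '' when no declaration is found or the file cannot be read
    try:
        with open(java_path, 'r') as fp:
            match = re.search(r'^\s*package\s+([\w.]+)\s*;', fp.read(), re.MULTILINE)
            return match.group(1) if match else ''
    except IOError:
        return ''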
def run_retrolambda(self):
    if self._is_need_javac and self._is_retrolambda_enabled:
        lambda_config = self._config['retrolambda'][self._name]
        target_dir = self._finder.get_patch_classes_cache_dir()
        jar_args = [Builder.get_java(self._config),
                    '-Dretrolambda.inputDir={}'.format(target_dir),
                    '-Dretrolambda.outputDir={}'.format(target_dir)]
        if lambda_config['supportIncludeFiles']:
            files_stat_path = os.path.join(self._cache_dir, self._name, 'lambda_files_stat.json')
            include_files = []
            if os.path.exists(files_stat_path):
                files_stat = load_json_cache(files_stat_path)
            else:
                files_stat = {}
            for dirpath, dirnames, files in os.walk(target_dir):
                for fn in files:
                    fpath = os.path.join(dirpath, fn)
                    if fpath not in files_stat:
                        include_files.append(fpath)
                        self.debug('incremental build new lambda file: {}'.format(fpath))
                    else:
                        if os.path.getmtime(fpath) > files_stat[fpath]['mtime']:
                            include_files.append(fpath)
                            self.debug('incremental build lambda file: {}'.format(fpath))
            include_files_param = os.pathsep.join(include_files)
            if len(include_files_param) > 3496:
                include_files_path = os.path.join(self._cache_dir, self._name, 'retrolambda_inc.list')
                self.__save_parms_to_file(include_files_path, include_files)
                jar_args.append('-Dretrolambda.includedFile={}'.format(include_files_path))
            else:
                jar_args.append('-Dretrolambda.includedFiles={}'.format(include_files_param))
        lambda_classpaths = [target_dir, lambda_config['rtJar']]
        lambda_classpaths.extend(self._classpaths)
        param = os.pathsep.join(lambda_classpaths)
        if lambda_config['supportIncludeFiles'] and len(param) > 3496:
            classpath_file = os.path.join(self._cache_dir, self._name, 'retrolambda_classpaths.path')
            self.__save_parms_to_file(classpath_file, lambda_classpaths)
            jar_args.append('-Dretrolambda.classpathFile={}'.format(classpath_file))
        else:
            jar_args.append('-Dretrolambda.classpath={}'.format(param))
        jar_args.append('-cp')
        jar_args.append(lambda_config['targetJar'])
        jar_args.append(lambda_config['mainClass'])
        self.debug('retrolambda exec: ' + ' '.join(jar_args))
        output, err, code = cexec(jar_args, callback=None)
        if code != 0:
            raise FreelineException('retrolambda compile failed.', '{}\n{}'.format(output, err))
        if lambda_config['supportIncludeFiles']:
            for fpath in include_files:
                if fpath not in files_stat:
                    files_stat[fpath] = {}
                files_stat[fpath]['mtime'] = os.path.getmtime(fpath)
            write_json_cache(files_stat_path, files_stat)
            self.debug('save lambda files stat to {}'.format(files_stat_path))
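# __save_parms_to_file is not shown here. It is used above to dodge the
# command-line length limit by spilling long argument lists into a file; a
# sketch, assuming retrolambda's *File properties expect one entry per line:
def save_parms_to_file_sketch(path, params):
    # create the parent directory on first use, then write one param per line
    if not os.path.exists(os.path.dirname(path)):
        os.makedirs(os.path.dirname(path))
    with open(path, 'w') as fp:
        fp.write('\n'.join(params))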
def __update_class_related(self):
    # update class related
    changed_java_files = []
    for module, file_dict in self._changed_files['projects'].iteritems():
        if len(file_dict['src']) > 0:
            changed_java_files.extend(file_dict['src'])
    # process changed java files
    if len(changed_java_files) > 0:
        # update stat_cache.json
        cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
        changefiles = ';'.join(changed_java_files)
        class_related_args = ['java', '-jar',
                              os.path.join('freeline', 'release-tools', 'classrelated.jar'),
                              cache_path, changefiles]
        self.debug('update class related: ' + ' '.join(class_related_args))
        # show_gradle_log = False
        # if ArgsConfig.args is not None and ('gradlelog' in ArgsConfig.args and ArgsConfig.args.gradlelog):
        #     show_gradle_log = True
        output, err, code = cexec(class_related_args, callback=None)
        # read from stat_cache.json
        stat_cache = load_json_cache(cache_path)
        # ignore files (note: currently unused)
        ignore_java_files = ['UCR.java', 'UCContentProvider.java']
        related_files = []
        package_map = {}
        # read all package java files
        for module, file_dict in stat_cache.items():
            for file in file_dict.keys():
                package_name = self.__get_package(file)
                if package_name == '':
                    continue
                if package_name not in package_map:
                    same_package_files = []
                    package_map[package_name] = same_package_files
                else:
                    same_package_files = package_map.get(package_name)
                same_package_files.append(file)
        # read all related java files
        for file in changed_java_files:
            for module, file_dict in stat_cache.items():
                if file in file_dict:
                    file_stat = file_dict[file]
                    if 'related' in file_stat:
                        related_files.extend(file_stat['related'])
            # read all same package files
            package_name = self.__get_package(file)
            if package_name != '' and package_name in package_map:
                same_package_files = package_map[package_name]
                related_files.extend(same_package_files)
        related_files = list(set(related_files))
        if len(related_files) > 0:
            # update self._changed_files['projects'] module's file_dict['src']
            for module, file_dict in stat_cache.items():
                for file in related_files:
                    if file in file_dict:
                        self._changed_files['projects'][module]['src'].append(file)
            self.debug('updated file changed list:')
            self.debug(self._changed_files)