def execute(self):
    """Scan every cached module for changed files and return the change map.

    Requires a stat_cache.json from a previous clean build; rebuilds the
    project info cache on demand. Returns a dict with the per-module
    changed-file buckets and the current build info.

    Raises:
        FileMissedException: when stat_cache.json is missing (needs clean build).
    """
    stat_cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    if not os.path.exists(stat_cache_path):
        raise FileMissedException('{} not found.'.format(stat_cache_path), ' re-run clean build.')
    self._stat_cache = load_json_cache(stat_cache_path)

    info_cache_path = os.path.join(self._config['build_cache_dir'], 'project_info_cache.json')
    if os.path.exists(info_cache_path):
        self.project_info = load_json_cache(info_cache_path)
    else:
        # First run without the info cache: compute it and persist for next time.
        self.project_info = get_project_info(self._config)
        write_json_cache(info_cache_path, self.project_info)

    build_info = self._get_build_info()
    buckets = ('libs', 'assets', 'res', 'src', 'manifest', 'config', 'so', 'cpp')
    for name, info in self.project_info.iteritems():
        # Only modules already present in the stat cache can be diffed.
        if name not in self._stat_cache:
            continue
        self._changed_files[name] = {bucket: [] for bucket in buckets}
        self._scan_module_changes(name, info['path'])
    self._mark_changed_flag()
    return {'projects': self._changed_files, 'build_info': build_info}
def execute(self):
    """Rebuild the project info cache from a freshly loaded freeline config."""
    # reload project info
    from dispatcher import read_freeline_config
    config = read_freeline_config()
    cache_file = os.path.join(config['build_cache_dir'], 'project_info_cache.json')
    write_json_cache(cache_file, get_project_info(config))
def _get_apt_related_files(self, filter_tags=None):
    """Return java paths tied to annotation processing for this module.

    Serves from the per-module cache file when present; otherwise derives
    the list from freeline_annotation_info.json, keeping only entries whose
    module is this one or a local dependency, and persists the result.

    Args:
        filter_tags: optional collection of annotation names to keep;
            annotations outside it are skipped.
    """
    cache_path = self._get_apt_related_files_cache_path()
    if os.path.exists(cache_path):
        return load_json_cache(cache_path)

    info_path = os.path.join(self._cache_dir, 'freeline_annotation_info.json')
    if not os.path.exists(info_path):
        return []

    annotation_info = load_json_cache(info_path)
    result = []
    for anno, entries in annotation_info.iteritems():
        if filter_tags and anno not in filter_tags:
            self.debug('ignore annotation: {}'.format(anno))
            continue
        for entry in entries:
            in_scope = (entry['module'] == self._name
                        or entry['module'] in self._module_info['local_module_dep'])
            if in_scope and 'java_path' in entry and entry['java_path']:
                result.append(entry['java_path'])
    write_json_cache(cache_path, result)
    return result
def execute(self):
    """Scan all project modules for changed files and return the change map.

    Loads the stat cache from a previous build, (re)builds the project info
    cache as needed, then scans every module for modifications.

    Raises:
        FileMissedException: when stat_cache.json is absent (clean build needed).
    """
    stat_cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    if not os.path.exists(stat_cache_path):
        raise FileMissedException('{} not found.'.format(stat_cache_path), ' re-run clean build.')
    self._stat_cache = load_json_cache(stat_cache_path)

    info_cache_path = os.path.join(self._config['build_cache_dir'], 'project_info_cache.json')
    if os.path.exists(info_cache_path):
        self.project_info = load_json_cache(info_cache_path)
    else:
        # No cached project info yet: compute and persist it.
        self.project_info = get_project_info(self._config)
        write_json_cache(info_cache_path, self.project_info)

    build_info = self._get_build_info()
    buckets = ('libs', 'assets', 'res', 'src', 'manifest', 'config')
    for name, info in self.project_info.iteritems():
        self._changed_files[name] = {bucket: [] for bucket in buckets}
        self._scan_module_changes(name, info['path'])
    self._mark_changed_flag()
    return {'projects': self._changed_files, 'build_info': build_info}
def execute(self):
    """Refresh mtime/size (and md5) stat caches for all changed files.

    Touches each changed file (excluding entries under the build cache dir
    and 'apt' buckets) so its mtime is current, then records the fresh
    mtime/size in stat_cache.json; files already tracked in
    stat_cache_md5.json also get their digest refreshed.
    """
    cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    stat_cache = load_json_cache(cache_path)
    cache_path_md5 = os.path.join(self._config['build_cache_dir'], 'stat_cache_md5.json')
    stat_cache_md5 = load_json_cache(cache_path_md5)

    build_cache_dir = self._config['build_cache_dir']
    for module, file_dict in self._changed_files.iteritems():
        # Fix: a module can appear in _changed_files without an entry in the
        # stat cache yet; create one instead of raising KeyError below.
        module_stats = stat_cache.setdefault(module, {})
        for key, files in file_dict.iteritems():
            if key == 'apt':
                # apt bucket is handled elsewhere; skip it here
                continue
            for fpath in files:
                if fpath.startswith(build_cache_dir) or not os.path.exists(fpath):
                    continue
                self.debug('refresh {} stat'.format(fpath))
                os.utime(fpath, None)  # bump mtime so the file reads as fresh
                entry = module_stats.setdefault(fpath, {})
                if fpath in stat_cache_md5:
                    # only refresh digests for files already md5-tracked
                    stat_cache_md5[fpath] = get_md5(fpath)
                entry['mtime'] = os.path.getmtime(fpath)
                entry['size'] = os.path.getsize(fpath)

    write_json_cache(cache_path, stat_cache)
    write_json_cache(cache_path_md5, stat_cache_md5)
def _backup_res_changed_list(self, changed_list):
    """Merge changed_list into the persisted res-change backup file.

    Keeps previously backed-up entries, appends only unseen paths, and
    rewrites rchangelist.bak inside the module's res pack directory.
    """
    respack_dir = self._finder.get_dst_res_pack_path(self._name)
    backup = self._get_backup_res_changed_list()
    for item in changed_list:
        if item not in backup:
            backup.append(item)
    write_json_cache(os.path.join(respack_dir, 'rchangelist.bak'),
                     {"changed_list": backup})
def _append_new_related_files(self):
    """Extend the apt related-files cache with newly changed src/apt files."""
    related = self._get_apt_related_files()

    def collect(paths):
        # keep only non-empty paths not already recorded
        for path in paths:
            if path and path not in related:
                self.debug('add new related file: {}'.format(path))
                related.append(path)

    collect(self._changed_files['src'])
    collect(self._changed_files['apt'])
    write_json_cache(self._get_apt_related_files_cache_path(), related)
def execute(self):
    """Touch every changed file and persist its fresh mtime/size stats."""
    cache_path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    stat_cache = load_json_cache(cache_path)
    build_cache_dir = self._config['build_cache_dir']
    for module, file_dict in self._changed_files.iteritems():
        for files in file_dict.itervalues():
            for fpath in files:
                # skip build-cache artifacts and files that vanished
                if fpath.startswith(build_cache_dir) or not os.path.exists(fpath):
                    continue
                self.debug('refresh {} stat'.format(fpath))
                os.utime(fpath, None)  # bump mtime so the file reads as fresh
                entry = stat_cache[module].setdefault(fpath, {})
                entry['mtime'] = os.path.getmtime(fpath)
                entry['size'] = os.path.getsize(fpath)
    write_json_cache(cache_path, stat_cache)
def _get_apt_related_files(self):
    """Return all java paths recorded in the annotation info, cached on disk.

    Serves from the per-module cache file when present; otherwise flattens
    every non-empty 'java_path' out of freeline_annotation_info.json,
    persists the list, and returns it. Returns [] when no info exists.
    """
    cache_path = self._get_apt_related_files_cache_path()
    if os.path.exists(cache_path):
        return load_json_cache(cache_path)

    info_path = os.path.join(self._cache_dir, 'freeline_annotation_info.json')
    if not os.path.exists(info_path):
        return []

    info_cache = load_json_cache(info_path)
    related = [info['java_path']
               for files in info_cache.itervalues()
               for info in files
               if 'java_path' in info and info['java_path']]
    write_json_cache(cache_path, related)
    return related
def _get_apt_related_files(self, filter_tags=None):
    """Collect java paths relevant to annotation processing for this module.

    Uses the on-disk cache when available. Otherwise walks the annotation
    info map, skipping annotations excluded by filter_tags and entries
    belonging to unrelated modules, then persists and returns the list.
    """
    path = self._get_apt_related_files_cache_path()
    if os.path.exists(path):
        return load_json_cache(path)

    info_path = os.path.join(self._cache_dir, 'freeline_annotation_info.json')
    if not os.path.exists(info_path):
        return []

    annotation_map = load_json_cache(info_path)
    collected = []
    for tag, infos in annotation_map.iteritems():
        if filter_tags and tag not in filter_tags:
            self.debug('ignore annotation: {}'.format(tag))
            continue
        for item in infos:
            # keep only entries owned by this module or a local dependency
            if item['module'] != self._name and item['module'] not in self._module_info['local_module_dep']:
                continue
            if 'java_path' in item and item['java_path']:
                collected.append(item['java_path'])
    write_json_cache(path, collected)
    return collected
def run_retrolambda(self):
    """Backport Java 8 lambdas in freshly compiled patch classes via retrolambda.

    Runs only when javac produced new classes and retrolambda is enabled for
    this module. Assembles a `java -D... -cp targetJar mainClass` command;
    when the configured retrolambda supports include-file lists, only classes
    that are new or newer than the recorded mtime snapshot are processed, and
    the snapshot is persisted afterwards for the next incremental run.

    Raises:
        FreelineException: if the retrolambda process exits non-zero.
    """
    if self._is_need_javac and self._is_retrolambda_enabled:
        lambda_config = self._config['retrolambda'][self._name]
        target_dir = self._finder.get_patch_classes_cache_dir()
        # retrolambda rewrites classes in place: input dir == output dir
        jar_args = [Builder.get_java(self._config),
                    '-Dretrolambda.inputDir={}'.format(target_dir),
                    '-Dretrolambda.outputDir={}'.format(target_dir)]
        if lambda_config['supportIncludeFiles']:
            # Incremental mode: diff each class file against the mtime
            # snapshot from the previous run; include only new/changed ones.
            files_stat_path = os.path.join(self._cache_dir, self._name, 'lambda_files_stat.json')
            include_files = []
            if os.path.exists(files_stat_path):
                files_stat = load_json_cache(files_stat_path)
            else:
                files_stat = {}
            for dirpath, dirnames, files in os.walk(target_dir):
                for fn in files:
                    fpath = os.path.join(dirpath, fn)
                    if fpath not in files_stat:
                        include_files.append(fpath)
                        self.debug('incremental build new lambda file: {}'.format(fpath))
                    else:
                        if os.path.getmtime(fpath) > files_stat[fpath]['mtime']:
                            include_files.append(fpath)
                            self.debug('incremental build lambda file: {}'.format(fpath))
            include_files_param = os.pathsep.join(include_files)
            # Long lists would overflow the command line, so fall back to an
            # on-disk list file. NOTE(review): 3496 looks like a margin under
            # a platform argv/command-line limit — confirm its origin.
            if len(include_files_param) > 3496:
                include_files_path = os.path.join(self._cache_dir, self._name, 'retrolambda_inc.list')
                self.__save_parms_to_file(include_files_path, include_files)
                jar_args.append('-Dretrolambda.includedFile={}'.format(include_files_path))
            else:
                jar_args.append('-Dretrolambda.includedFiles={}'.format(include_files_param))
        lambda_classpaths = [target_dir, lambda_config['rtJar']]
        lambda_classpaths.extend(self._classpaths)
        param = os.pathsep.join(lambda_classpaths)
        # Same command-line-length fallback, applied to the classpath.
        if lambda_config['supportIncludeFiles'] and len(param) > 3496:
            classpath_file = os.path.join(self._cache_dir, self._name, 'retrolambda_classpaths.path')
            self.__save_parms_to_file(classpath_file, lambda_classpaths)
            jar_args.append('-Dretrolambda.classpathFile={}'.format(classpath_file))
        else:
            jar_args.append('-Dretrolambda.classpath={}'.format(param))
        jar_args.append('-cp')
        jar_args.append(lambda_config['targetJar'])
        jar_args.append(lambda_config['mainClass'])
        self.debug('retrolambda exec: \n' + ' '.join(jar_args))
        output, err, code = cexec(jar_args, callback=None)
        if code != 0:
            raise FreelineException('retrolambda compile failed.', '{}\n{}'.format(output, err))
        if lambda_config['supportIncludeFiles']:
            # Persist the fresh mtime snapshot so the next run can diff.
            for fpath in include_files:
                if fpath not in files_stat:
                    files_stat[fpath] = {}
                files_stat[fpath]['mtime'] = os.path.getmtime(fpath)
            write_json_cache(files_stat_path, files_stat)
            self.debug('save lambda files stat to {}'.format(files_stat_path))
def _save_cache(self):
    """Persist the in-memory stat cache, replacing any stale file."""
    path = os.path.join(self._config['build_cache_dir'], 'stat_cache.json')
    if os.path.exists(path):
        os.remove(path)  # drop the stale copy before rewriting
    write_json_cache(path, self._stat_cache)
def _save_cache(self):
    """Write the in-memory stat cache back to its configured path."""
    path = self._cache_path
    if os.path.exists(path):
        os.remove(path)  # clear the stale file before the fresh write
    write_json_cache(path, self._stat_cache)
def execute(self):
    """Regenerate and persist the project info cache from the current config."""
    cache_file = os.path.join(self._config['build_cache_dir'], 'project_info_cache.json')
    info = get_project_info(self._config)
    write_json_cache(cache_file, info)