def GetModel(self, api_name):
  # Callers sometimes specify a filename which includes .json or .idl - if
  # so, believe them. They may even include the 'api/' prefix.
  if os.path.splitext(api_name)[1] in ('.json', '.idl'):
    if not api_name.startswith(API_PATH + '/'):
      api_name = posixpath.join(API_PATH, api_name)
    return self._model_cache.GetFromFile(api_name)

  assert not api_name.startswith(API_PATH)

  # API names are given as declarativeContent and app.window but file names
  # will be declarative_content and app_window.
  file_name = UnixName(api_name).replace('.', '_')
  # Devtools APIs are in API_PATH/devtools/ not API_PATH/, and have their
  # "devtools" names removed from the file names.
  basename = posixpath.basename(file_name)
  if basename.startswith('devtools_'):
    file_name = posixpath.join(
        'devtools', file_name.replace(basename, basename[len('devtools_'):]))

  futures = [self._model_cache.GetFromFile(
                 '%s/%s.%s' % (API_PATH, file_name, ext))
             for ext in ('json', 'idl')]
  def resolve():
    for future in futures:
      try:
        return future.Get()
      except FileNotFoundError:
        pass
    # Propagate the first FileNotFoundError if neither was found.
    futures[0].Get()
  return Future(delegate=Gettable(resolve))

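# Every snippet in this section returns Future(delegate=Gettable(resolve)).
# A minimal sketch of what that pair might look like, assumed purely for
# illustration (the real classes live in the docs server's future.py and
# handle tracebacks more carefully):
class Gettable(object):
  '''Wraps a no-argument callable in the delegate contract: an object
  exposing a Get() method.
  '''
  def __init__(self, fn):
    self._fn = fn

  def Get(self):
    return self._fn()


class Future(object):
  '''Resolves to a pre-computed value, a stored exception, or the result of
  a delegate's Get(), whichever was supplied at construction.
  '''
  def __init__(self, value=None, delegate=None, exc_info=None):
    self._value = value
    self._delegate = delegate
    self._exc_info = exc_info

  def Get(self):
    if self._exc_info is not None:
      # Re-raise the stored exception instance (the real implementation also
      # preserves the original traceback).
      raise self._exc_info[1]
    if self._delegate is not None:
      return self._delegate.Get()
    return self._value
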
def _GenerateWhatsNewDict(self):
  whats_new_json_future = self._parse_cache.GetFromFile(
      posixpath.join(JSON_TEMPLATES, 'whats_new.json'))
  def _MakeDictByPlatform(platform):
    whats_new_json = whats_new_json_future.Get()
    platform_list = []
    apis = self._GenerateApiListWithVersion(platform)
    apis.extend(self._GenerateChangesListWithVersion(platform, whats_new_json))
    apis.sort(key=itemgetter('version'), reverse=True)
    for version, group in groupby(apis, key=itemgetter('version')):
      whats_new_by_version = {
          'version': version,
      }
      for item in group:
        item_type = item['type']
        if item_type not in whats_new_by_version:
          whats_new_by_version[item_type] = []
        whats_new_by_version[item_type].append(item)
      platform_list.append(whats_new_by_version)
    return platform_list

  def resolve():
    return {
        'apps': _MakeDictByPlatform('apps'),
        'extensions': _MakeDictByPlatform('extensions')
    }
  return Future(delegate=Gettable(resolve))

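# groupby() only merges *adjacent* items, which is why the list above is
# sorted by 'version' before grouping. A self-contained demonstration with
# hypothetical records:
from itertools import groupby
from operator import itemgetter

apis = [{'version': 25, 'type': 'apis'},
        {'version': 26, 'type': 'apis'},
        {'version': 25, 'type': 'privacy'}]
apis.sort(key=itemgetter('version'), reverse=True)
for version, group in groupby(apis, key=itemgetter('version')):
  print('%s: %s' % (version, [item['type'] for item in group]))
# 26: ['apis']
# 25: ['apis', 'privacy']
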
def _GetImpl(self, path, reader, version_getter):
  # Strategy: Get the current version of |path| in the main FileSystem, then
  # run through |_compiled_fs_chain| in *reverse* to find the "oldest"
  # FileSystem with an up-to-date version of that file.
  #
  # Obviously, if files have been added in the main FileSystem then none of
  # the older FileSystems will be able to find it.
  read_futures = [(reader(compiled_fs), compiled_fs)
                  for compiled_fs in self._compiled_fs_chain]
  def resolve():
    try:
      first_compiled_fs = self._compiled_fs_chain[0]
      # The first file system contains both files of a newer version and
      # files shared with other compiled file systems. We try each compiled
      # file system in reverse order and return the data when the version
      # matches. Data cached in other compiled file systems is reused
      # whenever possible, so we don't need to recompile things that are
      # unchanged across these file systems.
      first_version = version_getter(first_compiled_fs)
      for read_future, compiled_fs in reversed(read_futures):
        if version_getter(compiled_fs) == first_version:
          return read_future.Get()
    except FileNotFoundError:
      pass
    # Try an arbitrary operation again to generate a realistic stack trace.
    return read_futures[0][0].Get()
  return Future(delegate=Gettable(resolve))

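# The reverse-scan strategy in miniature, with a hypothetical chain of
# (name, version) pairs: reuse the *oldest* file system whose version matches
# the head of the chain.
chain = [('fs-new', 3), ('fs-mid', 3), ('fs-old', 2)]
head_version = chain[0][1]
for name, version in reversed(chain):
  if version == head_version:
    print('reusing compiled data from %s' % name)  # fs-mid
    break
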
def GetFromFile(self, path):
  '''Calls |compilation_function| on the contents of the file at |path|.
  '''
  AssertIsFile(path)
  try:
    version = self._file_system.Stat(path).version
  except FileNotFoundError:
    return Future(exc_info=sys.exc_info())
  cache_entry = self._file_object_store.Get(path).Get()
  if (cache_entry is not None) and (version == cache_entry.version):
    return Future(value=cache_entry._cache_data)
  future_files = self._file_system.ReadSingle(path)
  def resolve():
    cache_data = self._compilation_function(path, future_files.Get())
    self._file_object_store.Set(path, _CacheEntry(cache_data, version))
    return cache_data
  return Future(delegate=Gettable(resolve))

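# The version check above in miniature: recompile only when the stat version
# moves. Plain dicts stand in for the object store and Stat() here (all names
# and data are hypothetical):
store = {}
stat_version = {'path.json': 1}

def get(path, compile_fn, contents):
  entry = store.get(path)
  if entry is not None and entry['version'] == stat_version[path]:
    return entry['data']  # cache hit: version unchanged, no recompile
  data = compile_fn(contents)
  store[path] = {'data': data, 'version': stat_version[path]}
  return data

print(get('path.json', len, 'abc'))   # compiles: 3
print(get('path.json', len, 'abc'))   # cached: 3
stat_version['path.json'] = 2         # the file changed upstream
print(get('path.json', len, 'abcd'))  # recompiles: 4
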
def _CreatePermissionsData(self):
  api_features_future = self._features_bundle.GetAPIFeatures()
  permission_features_future = self._features_bundle.GetPermissionFeatures()
  def resolve():
    permission_features = permission_features_future.Get()
    _AddDependencyDescriptions(permission_features, api_features_future.Get())
    # Turn partial templates into descriptions, ensure anchors are set.
    for permission in permission_features.values():
      if 'anchor' not in permission:
        permission['anchor'] = permission['name']
      if 'partial' in permission:
        permission['description'] = self._template_cache.GetFromFile(
            PRIVATE_TEMPLATES + permission['partial']).Get()
        del permission['partial']

    def filter_for_platform(permissions, platform):
      return _ListifyPermissions(features.Filtered(permissions, platform))
    return {
        'declare_apps': filter_for_platform(permission_features, 'apps'),
        'declare_extensions': filter_for_platform(permission_features,
                                                  'extensions')
    }
  return Future(delegate=Gettable(resolve))

def FetchAsync(self, url, **kwargs):
  self._fetch_async_count += 1
  future = self._fetcher.FetchAsync(url, **kwargs)
  def resolve():
    self._fetch_resolve_count += 1
    return future.Get()
  return Future(delegate=Gettable(resolve))

def GetAPIFeatures(self):
  api_features = self._object_store.Get('api_features').Get()
  if api_features is not None:
    return Future(value=api_features)

  api_features_future = self._api_cache.GetFeatures()
  manifest_features_future = self._manifest_cache.GetFeatures()
  permission_features_future = self._permission_cache.GetFeatures()
  def resolve():
    api_features = api_features_future.Get()
    manifest_features = manifest_features_future.Get()
    permission_features = permission_features_future.Get()
    # TODO(rockot): Handle inter-API dependencies more gracefully.
    # Not yet a problem because there is only one such case (windows -> tabs).
    # If we don't store this value before annotating platforms, inter-API
    # dependencies will lead to infinite recursion.
    for feature in api_features.itervalues():
      _AddPlatformsFromDependencies(
          feature, api_features, manifest_features, permission_features)
    self._object_store.Set('api_features', api_features)
    return api_features
  return Future(delegate=Gettable(resolve))

def GetContentAndType(self, path):
  '''Returns the ContentAndType of the file at |path|.
  '''
  AssertIsValid(path)
  base, ext = posixpath.splitext(path)

  # Check for a zip file first, if zip is enabled.
  if self._directory_zipper and ext == '.zip':
    zip_future = self._directory_zipper.Zip(ToDirectory(base))
    return Future(delegate=Gettable(
        lambda: ContentAndType(zip_future.Get(), 'application/zip')))

  # If there is no file extension, look for a file with one of the default
  # extensions.
  #
  # Note that it would make sense to guard this on Exists(path), since a file
  # without an extension may actually exist, but it's such an uncommon case
  # it hardly seems worth the potential performance hit.
  if not ext:
    for default_ext in self._default_extensions:
      if self.file_system.Exists(path + default_ext).Get():
        path += default_ext
        break

  return self._content_cache.GetFromFile(path)

def RaiseInFuture(cls, message):
  stack = traceback.format_stack()
  def boom():
    raise cls('%s. Creation stack:\n%s' % (message, ''.join(stack)))
  return Future(delegate=Gettable(boom))

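# The trick RaiseInFuture relies on, stripped of the Future machinery:
# capture the stack with traceback.format_stack() at creation time, so the
# eventual error points at the call site that *created* the future rather
# than the (possibly distant) one that resolved it.
import traceback

stack = traceback.format_stack()  # captured here, at "creation"
def boom():
  raise ValueError('delayed error. Creation stack:\n%s' % ''.join(stack))

try:
  boom()  # raised much later, far from the creation site
except ValueError as e:
  print(str(e).splitlines()[0])  # delayed error. Creation stack:
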
def ReadSingle(self, path):
  '''Reads a single file from the FileSystem. Returns a Future with the same
  rules as Read().
  '''
  AssertIsValid(path)
  read_single = self.Read([path])
  return Future(delegate=Gettable(lambda: read_single.Get()[path]))

def run_cron_for_future(target):
  title = target.__class__.__name__
  future, init_timer = TimerClosure(target.Cron)
  assert isinstance(future, Future), (
      '%s.Cron() did not return a Future' % title)
  def resolve():
    resolve_timer = Timer()
    try:
      future.Get()
    except Exception as e:
      _cronlog.error('%s: error %s' % (title, traceback.format_exc()))
      # |results| is closed over from the enclosing scope.
      results.append(False)
      if IsDeadlineExceededError(e):
        raise
    finally:
      resolve_timer.Stop()
      _cronlog.info('%s took %s: %s to initialize and %s to resolve' %
          (title,
           init_timer.With(resolve_timer).FormatElapsed(),
           init_timer.FormatElapsed(),
           resolve_timer.FormatElapsed()))
  return Future(delegate=Gettable(resolve))

def GetFromFileListing(self, path):
  '''Calls |compilation_function| on the listing of the files at |path|.
  Assumes that the path given is to a directory.
  '''
  if not path.endswith('/'):
    path += '/'
  try:
    version = self._file_system.Stat(path).version
  except FileNotFoundError:
    return Future(exc_info=sys.exc_info())
  cache_entry = self._list_object_store.Get(path).Get()
  if (cache_entry is not None) and (version == cache_entry.version):
    return Future(value=cache_entry._cache_data)
  recursive_list_future = self._RecursiveList(path)
  def resolve():
    cache_data = self._compilation_function(path, recursive_list_future.Get())
    self._list_object_store.Set(path, _CacheEntry(cache_data, version))
    return cache_data
  return Future(delegate=Gettable(resolve))

def Cron(self):
  futures = [self._cache.GetFromFile('%s/%s_sidenav.json' %
                                     (JSON_TEMPLATES, platform))
             for platform in ('apps', 'extensions')]
  return Future(delegate=Gettable(lambda: [f.Get() for f in futures]))

def Read(self, paths, binary=False):
  '''Returns a dictionary mapping |paths| to the contents of the file at
  each path. If a path ends with a '/', it is treated as a directory and is
  mapped to a list of filenames in that directory. |binary| is ignored.
  '''
  names = self._GetNamelist()
  if not names:
    # No files in this repository.
    def raise_file_not_found():
      raise FileNotFoundError('No paths can be found, repository is empty')
    return Future(delegate=Gettable(raise_file_not_found))
  else:
    prefix = names[0].split('/')[0]

  reads = {}
  for path in paths:
    full_path = posixpath.join(prefix, path)
    if path == '' or path.endswith('/'):
      # |path| is a directory: list the files directly under it.
      trimmed_paths = []
      for f in filter(lambda s: s.startswith(full_path), names):
        if '/' not in f[len(full_path):-1] and f != full_path:
          trimmed_paths.append(f[len(full_path):])
      reads[path] = trimmed_paths
    else:
      try:
        reads[path] = self._repo_zip.Get().read(full_path)
      except KeyError as error:
        return Future(exc_info=(FileNotFoundError,
                                FileNotFoundError(error),
                                sys.exc_info()[2]))

  return Future(value=reads)

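# How the directory branch above trims the zip's name list, shown with a
# hypothetical namelist: keep entries directly under full_path (no further
# '/' before the optional trailing one), then strip the prefix.
names = ['repo/', 'repo/docs/', 'repo/docs/a.md',
         'repo/docs/sub/', 'repo/docs/sub/b.md']
full_path = 'repo/docs/'
trimmed = [f[len(full_path):] for f in names
           if f.startswith(full_path)
           and f != full_path
           and '/' not in f[len(full_path):-1]]
print(trimmed)  # ['a.md', 'sub/']
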
def FetchAsync(self, url):
  self._async_count += 1
  url = url.rsplit('?', 1)[0]
  def resolve():
    self._async_resolve_count += 1
    return self._DoFetch(url)
  return Future(delegate=Gettable(resolve))

def Cron(self):
  '''Load files during a cron run.
  '''
  futures = []
  for root, dirs, files in self._file_system.Walk(''):
    if 'redirects.json' in files:
      futures.append(
          self._cache.GetFromFile(posixpath.join(root, 'redirects.json')))
  return Future(delegate=Gettable(lambda: [f.Get() for f in futures]))

def _RecursiveList(self, path):
  '''Returns a Future containing the recursive directory listing of |path| as
  a flat list of paths.
  '''
  def split_dirs_from_files(paths):
    '''Returns a tuple (dirs, files) where |dirs| contains the directory
    names in |paths| and |files| contains the files.
    '''
    result = [], []
    for path in paths:
      result[0 if path.endswith('/') else 1].append(path)
    return result

  def add_prefix(prefix, paths):
    return [prefix + path for path in paths]

  # Read in the initial list of files. Do this eagerly (i.e. not part of the
  # asynchronous Future contract) because there's a greater chance to
  # parallelise fetching with the second layer (can fetch multiple paths).
  try:
    first_layer_dirs, first_layer_files = split_dirs_from_files(
        self._file_system.ReadSingle(path).Get())
  except FileNotFoundError:
    return Future(exc_info=sys.exc_info())

  if not first_layer_dirs:
    return Future(value=first_layer_files)

  second_layer_listing = self._file_system.Read(
      add_prefix(path, first_layer_dirs))

  def resolve():
    def get_from_future_listing(futures):
      '''Recursively lists files from directory listing |futures|.
      '''
      dirs, files = [], []
      for dir_name, listing in futures.Get().iteritems():
        new_dirs, new_files = split_dirs_from_files(listing)
        # |dirs| are paths for reading. Add the full prefix relative to
        # |path| so that |file_system| can find the files.
        dirs += add_prefix(dir_name, new_dirs)
        # |files| are not for reading, they are for returning to the caller.
        # This entire function set (i.e. GetFromFileListing) is defined to
        # not include the fetched-path in the result, however, |dir_name|
        # will be prefixed with |path|. Strip it.
        assert dir_name.startswith(path)
        files += add_prefix(dir_name[len(path):], new_files)
      if dirs:
        files += get_from_future_listing(self._file_system.Read(dirs))
      return files

    return first_layer_files + get_from_future_listing(second_layer_listing)

  return Future(delegate=Gettable(resolve))

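# split_dirs_from_files in isolation, relying on the convention used
# throughout these file systems: directory paths end with '/'.
def split_dirs_from_files(paths):
  result = [], []
  for path in paths:
    result[0 if path.endswith('/') else 1].append(path)
  return result

print(split_dirs_from_files(['api/', 'index.html', 'static/']))
# (['api/', 'static/'], ['index.html'])
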
def Cron(self):
  futures = [self._path_canonicalizer.Cron()]
  for root, _, files in self.file_system.Walk(''):
    for f in files:
      futures.append(self.GetContentAndType(Join(root, f)))
      # Also cache the extension-less version of the file if needed.
      base, ext = posixpath.splitext(f)
      if f != SITE_VERIFICATION_FILE and ext in self._default_extensions:
        futures.append(self.GetContentAndType(Join(root, base)))
  # TODO(kalman): Cache .zip files for each directory (if supported).
  return Future(delegate=Gettable(lambda: [f.Get() for f in futures]))

def GetContentAndType(self, host, path):
  path = path.lstrip('/')
  base, ext = os.path.splitext(path)

  # Check for a zip file first, if zip is enabled.
  if self._directory_zipper and ext == '.zip':
    zip_future = self._directory_zipper.Zip(base)
    return Future(delegate=Gettable(
        lambda: ContentAndType(zip_future.Get(), 'application/zip')))

  return self._content_cache.GetFromFile(path, binary=True)

def Read(self, paths, binary=False):
  '''Reads |paths| from |_file_system|, then applies the most recent update
  from |_updates|, if any.
  '''
  self._read_count += 1
  future_result = self._file_system.Read(paths, binary=binary)
  def resolve():
    self._read_resolve_count += 1
    result = future_result.Get()
    for path in result.iterkeys():
      _, update = self._GetMostRecentUpdate(path)
      if update is not None:
        result[path] = update
    return result
  return Future(delegate=Gettable(resolve))

def Read(self, paths, binary=False):
  # Maintain a reverse mapping so the result can be mapped back to the
  # original paths given (the result from |file_system| will include |root|
  # in each path, which would be wrong).
  prefixed_paths = {}
  def prefix(path):
    prefixed = posixpath.join(self._root, path)
    prefixed_paths[prefixed] = path
    return prefixed
  future_result = self._file_system.Read(
      tuple(prefix(path) for path in paths), binary=binary)
  def resolve():
    return dict((prefixed_paths[path], content)
                for path, content in future_result.Get().iteritems())
  return Future(delegate=Gettable(resolve))

def Read(self, paths):
  def resolve():
    result = {}
    for path in paths:
      AssertIsValid(path)
      full_path = os.path.join(self._base_path,
                               _ConvertToFilepath(path).lstrip(os.sep))
      if path == '' or path.endswith('/'):
        result[path] = _ListDir(full_path)
      else:
        result[path] = _ReadFile(full_path)
    return result
  return Future(delegate=Gettable(resolve))

def Read(self, paths, binary=False):
  patched_files = set()
  added, deleted, modified = self._patcher.GetPatchedFiles()
  if set(paths) & set(deleted):
    def raise_file_not_found():
      raise FileNotFoundError('Files are removed from the patch.')
    return Future(delegate=Gettable(raise_file_not_found))
  patched_files |= (set(added) | set(modified))

  dir_paths = set(path for path in paths if path.endswith('/'))
  file_paths = set(paths) - dir_paths
  patched_paths = file_paths & patched_files
  unpatched_paths = file_paths - patched_files
  return Future(delegate=_AsyncFetchFuture(
      self._base_file_system.Read(unpatched_paths, binary),
      self._patcher.Apply(patched_paths, self._base_file_system, binary),
      self._TryReadDirectory(dir_paths, binary),
      self))

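# How Read() partitions a request, shown with hypothetical sample data:
# directories go to _TryReadDirectory, patched files to the patcher, and
# everything else to the base file system.
paths = ['a.html', 'b/', 'c.html']
patched_files = set(['c.html'])
dir_paths = set(p for p in paths if p.endswith('/'))
file_paths = set(paths) - dir_paths
print(sorted(file_paths & patched_files))  # patched:   ['c.html']
print(sorted(file_paths - patched_files))  # unpatched: ['a.html']
print(sorted(dir_paths))                   # dirs:      ['b/']
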
def _LoadCache(self):
  cached_future = self._cache.GetMulti(('canonical_paths',
                                        'simplified_paths_map'))
  def resolve():
    # |canonical_paths| is the pre-calculated set of canonical paths.
    # |simplified_paths_map| is a lazily populated mapping of simplified file
    # names to a list of full paths that contain them. For example,
    #   - browseraction: [extensions/browserAction.html]
    #   - storage: [apps/storage.html, extensions/storage.html]
    cached = cached_future.Get()
    canonical_paths, simplified_paths_map = (
        cached.get('canonical_paths'), cached.get('simplified_paths_map'))

    if canonical_paths is None:
      assert simplified_paths_map is None
      canonical_paths = set()
      simplified_paths_map = defaultdict(list)
      for base, dirs, files in self._file_system.Walk(''):
        for path in dirs + files:
          path_without_ext, ext = posixpath.splitext(path)
          canonical_path = posixpath.join(base, path_without_ext)
          if (ext not in self._strip_extensions or
              path == SITE_VERIFICATION_FILE):
            canonical_path += ext
          canonical_paths.add(canonical_path)
          simplified_paths_map[_SimplifyFileName(path)].append(canonical_path)
      # Store each list in |simplified_paths_map| sorted: shortest path
      # first, with ties in length broken lexicographically.
      for path_list in simplified_paths_map.itervalues():
        path_list.sort(key=lambda p: (len(p), p))
      self._cache.SetMulti({
          'canonical_paths': canonical_paths,
          'simplified_paths_map': simplified_paths_map,
      })
    else:
      assert simplified_paths_map is not None

    return canonical_paths, simplified_paths_map
  return Future(delegate=Gettable(resolve))

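# The sort key used for |simplified_paths_map| in isolation: shortest path
# wins, and equal lengths fall back to lexicographic order (sample paths are
# hypothetical):
candidates = ['extensions/storage.html', 'apps/storage.html']
candidates.sort(key=lambda p: (len(p), p))
print(candidates[0])  # apps/storage.html
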
def Read(self, paths):
  '''Returns a dictionary mapping |paths| to the contents of the file at
  each path. If a path ends with a '/', it is treated as a directory and is
  mapped to a list of filenames in that directory.
  '''
  self._EnsureRepoZip()
  def resolve():
    repo_zip = self._repo_zip.Get()
    reads = {}
    for path in paths:
      if path not in repo_zip.Paths():
        raise FileNotFoundError('"%s": %s not found' % (self._repo_key, path))
      if path == '' or path.endswith('/'):
        reads[path] = repo_zip.List(path)
      else:
        reads[path] = repo_zip.Read(path)
    return reads
  return Future(delegate=Gettable(resolve))

def Read(self, paths):
  def resolve():
    try:
      result = {}
      for path in paths:
        full_path = Join(self._bucket, path)
        logging.debug('gcs: requested path "%s", reading "%s"' %
                      (path, full_path))
        if IsDirectory(path):
          result[path] = _ListDir(full_path)
        else:
          result[path] = _ReadFile(full_path)
      return result
    except errors.AuthorizationError:
      self._warnAboutAuthError()
      raise
  return Future(delegate=Gettable(resolve))

def Cron(self):
  def safe(name, action, callback):
    '''Safely runs |callback| for a ContentProvider called |name|. It's
    important to run all ContentProvider Crons even if some of them fail.
    '''
    try:
      return callback()
    except:
      logging.error('Error %s Cron for ContentProvider "%s": %s' %
                    (action, name, traceback.format_exc()))
      return None

  futures = [(name, safe(name,
                         'initializing',
                         self._CreateContentProvider(name, config).Cron))
             for name, config in self._GetConfig().iteritems()]
  return Future(delegate=Gettable(
      lambda: [safe(name, 'resolving', f.Get) for name, f in futures if f]))

def _CreateManifestData(self):
  future_manifest_features = self._features_bundle.GetManifestFeatures()
  def resolve():
    manifest_features = future_manifest_features.Get()
    def for_templates(manifest_features, platform):
      return _AddLevelAnnotations(_ListifyAndSortDocs(
          ConvertDottedKeysToNested(
              features_utility.Filtered(manifest_features, platform + 's')),
          app_name=platform.capitalize()))
    return {
        'apps': for_templates(manifest_features, 'app'),
        'extensions': for_templates(manifest_features, 'extension')
    }
  return Future(delegate=Gettable(resolve))

def Cron(self):
  def safe(name, action, callback):
    '''Safely runs |callback| for a ContentProvider called |name| by
    swallowing exceptions and turning them into a None return value. It's
    important to run all ContentProvider Crons even if some of them fail.
    '''
    try:
      return callback()
    except:
      if not _IGNORE_MISSING_CONTENT_PROVIDERS[0]:
        logging.error('Error %s Cron for ContentProvider "%s":\n%s' %
                      (action, name, traceback.format_exc()))
      return None

  futures = [(name, safe(name,
                         'initializing',
                         self._CreateContentProvider(name, config).Cron))
             for name, config in self._GetConfig().iteritems()]
  return Future(delegate=Gettable(
      lambda: [safe(name, 'resolving', f.Get) for name, f in futures if f]))

def Exists(self, path):
  '''Returns a Future to the existence of |path|: True if |path| exists,
  False if not. Unlike the Read* methods this will not throw a
  FileNotFoundError, though it may still throw a FileSystemError.

  There are several ways to implement this method via the interface, but
  this method exists to do so in a canonical way that is most efficient
  for caching.
  '''
  AssertIsValid(path)
  if path == '':
    # There is always a root directory.
    return Future(value=True)

  parent, base = SplitParent(path)
  list_future = self.ReadSingle(ToDirectory(parent))
  def resolve():
    try:
      return base in list_future.Get()
    except FileNotFoundError:
      return False
  return Future(delegate=Gettable(resolve))

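# The same idea without Futures: a path exists iff its basename appears in
# its parent's listing, and a missing parent simply means False (the
# in-memory listing below is hypothetical).
def exists(listings, parent, base):
  try:
    return base in listings[parent]
  except KeyError:  # stands in for FileNotFoundError on the parent read
    return False

listings = {'docs/': ['index.html', 'api/']}
print(exists(listings, 'docs/', 'index.html'))  # True
print(exists(listings, 'other/', 'x.html'))     # False
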