def Refresh(self):
  def safe(name, action, callback):
    '''Safely runs |callback| for a ContentProvider called |name| by
    swallowing exceptions and turning them into a None return value. It's
    important to run all ContentProvider Refreshes even if some of them fail.
    '''
    try:
      return callback()
    except:
      if not _IGNORE_MISSING_CONTENT_PROVIDERS[0]:
        logging.error('Error %s Refresh for ContentProvider "%s":\n%s' %
                      (action, name, traceback.format_exc()))
      return None

  def refresh_provider(path, config):
    # Refresh the provider created here rather than constructing a second,
    # identical one just to call Refresh on it.
    provider = self._CreateContentProvider(path, config)
    future = safe(path, 'initializing', provider.Refresh)
    if future is None:
      return Future(callback=lambda: True)
    return Future(callback=lambda: safe(path, 'resolving', future.Get))

  return All(refresh_provider(path, config)
             for path, config in self._GetConfig().iteritems())
def collect(api_owners):
  if api_owners is not None:
    return api_owners

  # Get API owners from every OWNERS file that exists.
  api_owners = []
  for root in BROWSER_API_PATHS:
    for base, dirs, _ in self._host_fs.Walk(root, depth=1):
      for dir_ in dirs:
        owners_file = Join(root, base, dir_, _OWNERS)
        api_owners.append(
            self._owners_fs.GetFromFile(owners_file, skip_not_found=True))

  # Add an entry for the core extensions/apps owners.
  def fix_core_owners(entry):
    entry['apiName'] = _CORE_OWNERS
    entry['id'] = 'core'
    return entry

  owners_file = Join(BROWSER_CHROME_EXTENSIONS, _OWNERS)
  api_owners.append(
      self._owners_fs.GetFromFile(owners_file).Then(fix_core_owners))

  def sort_and_cache(api_owners):
    api_owners.sort(key=itemgetter('apiName'))
    self._cache.Set('api_owners', api_owners)
    return api_owners

  return All(api_owners).Then(sort_and_cache)
def Refresh(self):
  def render(path):
    request = Request(path, self._request.host, self._request.headers)
    delegate = _SingletonRenderServletDelegate(self._server_instance)
    return RenderServlet(request, delegate).Get()

  def request_files_in_dir(path, prefix='', strip_ext=None):
    '''Requests every file found under |path| in this host file system, with
    a request prefix of |prefix|. |strip_ext| is an optional list of file
    extensions that should be stripped from paths before requesting.
    '''
    def maybe_strip_ext(name):
      if name == SITE_VERIFICATION_FILE or not strip_ext:
        return name
      base, ext = posixpath.splitext(name)
      return base if ext in strip_ext else name
    files = [maybe_strip_ext(name)
             for name, _ in CreateURLsFromPaths(master_fs, path, prefix)]
    return _RequestEachItem(path, files, render)

  return All(request_files_in_dir(dir, prefix=prefix)
             for dir, prefix in _SUPPORTED_TARGETS.itervalues())
def load_features(dependency_features_list):
  futures = []
  for dependency_features, cache_type in zip(dependency_features_list,
                                             cache_types):
    if dependency_features is not None:
      # Get cached dependencies if possible. If it has been cached, all of
      # its features have been resolved, so the other fields are
      # unnecessary.
      futures.append(Future(value={'resolved': dependency_features}))
    else:
      futures.append(self._caches[cache_type].GetFeatures())

  def resolve(features):
    features_map = {}
    for cache_type, feature in zip(cache_types, features):
      # Copy down to the features_map level because the 'resolved' and
      # 'unresolved' dicts will be modified.
      features_map[cache_type] = dict(
          (c, copy(d)) for c, d in feature.iteritems())

    def has_unresolved():
      '''Determines whether there are any unresolved features left over in
      any of the categories in |dependencies|.
      '''
      return any(cache.get('unresolved')
                 for cache in features_map.itervalues())

    # Iterate until no more features can be resolved. If dependencies are
    # multiple levels deep, it might take multiple passes to inherit data to
    # the topmost feature. Track progress so that a feature which can never
    # be resolved does not spin this loop forever.
    any_resolved = True
    while has_unresolved() and any_resolved:
      any_resolved = False
      for cache_type, cache in features_map.iteritems():
        if 'unresolved' not in cache:
          continue
        to_remove = []
        for name, values in cache['unresolved'].iteritems():
          resolve_successful, feature = _ResolveFeature(
              name,
              values,
              cache['extra'].get(name, ()),
              self._platform,
              cache_type,
              features_map)
          if not resolve_successful:
            continue  # Try again on the next iteration of the while loop.
          any_resolved = True
          # When successfully resolved, remove it from the unresolved dict.
          # Add it to the resolved dict if it didn't get deleted.
          to_remove.append(name)
          if feature is not None:
            cache['resolved'][name] = feature
        for key in to_remove:
          del cache['unresolved'][key]

    for cache_type, cache in features_map.iteritems():
      self._object_store.Set(cache_type, cache['resolved'])
    return features_map[features_type]['resolved']

  return All(futures).Then(resolve)
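# A hypothetical sketch of one entry in |features_map|, inferred from the
# keys the resolution loop above reads ('resolved', 'unresolved', 'extra');
# real entries come from self._caches[cache_type].GetFeatures(), and the
# feature names here are made up for illustration:
#   features_map = {
#     'api': {
#       'resolved':   {'alarms': {'channel': 'stable'}},
#       'unresolved': {'alarmsPrivate': {'dependencies': ['alarms']}},
#       'extra':      {},
#     },
#   }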
def Refresh(self):
  '''Load files during a cron run.'''
  futures = []
  for root, _, files in self._file_system.Walk(''):
    if 'redirects.json' in files:
      futures.append(self._cache.GetFromFile(Join(root, 'redirects.json')))
  return All(futures)
def Cron(self):
  futures = []
  for platform in GetPlatforms():
    futures += [self._GetImpl(platform, name)
                for name in self._platform_bundle.GetAPIModels(platform)
                                                 .GetNames()]
  return All(futures, except_pass=FileNotFoundError)
def get_platform_schemas(platform):
  # Internal APIs are an implementation detail; do not pass them to
  # templates.
  return All([get_api_schema(platform, api)
              for api in self._platform_bundle.GetAPIModels(platform)
                                              .GetNames()
              if self._platform_bundle.GetAPICategorizer(platform)
                                      .GetCategory(api) != 'internal'],
             except_pass=FileNotFoundError)
def Cron(self):
  futures = []
  for root, _, files in self._file_system.Walk(self._dir):
    futures += [self._template_cache.GetFromFile(
                    posixpath.join(self._dir, root, FormatKey(f)))
                for f in files
                if posixpath.splitext(f)[1] == '.html']
  return All(futures)
def Refresh(self):
  futures = [self._path_canonicalizer.Refresh()]
  for root, _, files in self.file_system.Walk(''):
    for f in files:
      futures.append(self.GetContentAndType(Join(root, f)))
      # Also cache the extension-less version of the file if needed.
      base, ext = posixpath.splitext(f)
      if f != SITE_VERIFICATION_FILE and ext in self._default_extensions:
        futures.append(self.GetContentAndType(Join(root, base)))
  # TODO(kalman): Cache .zip files for each directory (if supported).
  return All(futures, except_pass=Exception, except_pass_log=True)
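# For illustration (hypothetical paths): if self._default_extensions
# contains '.html', walking over 'docs/index.html' warms the cache for both
# the file itself and its extension-less alias:
#   self.GetContentAndType('docs/index.html')
#   self.GetContentAndType('docs/index')
# SITE_VERIFICATION_FILE is skipped, presumably because verification
# requires serving the file under its exact name.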
def SetMulti(self, mapping):
  entities = [PersistentObjectStoreItem.CreateItem(
                  self._namespace, key, value)
              for key, value in mapping.iteritems()]
  # Some entities may be None if they were too large to insert. Skip those.
  rpcs = [db.put_async([entity for entity in entities if entity])]
  # If running the dev server, the futures don't complete until the server
  # is *quitting*. This is annoying. Flush now.
  if IsDevServer():
    for rpc in rpcs:
      rpc.wait()
  # Bind each |rpc| as a default argument so every Future resolves its own
  # RPC rather than whichever one the loop variable last referred to.
  return All(Future(callback=lambda rpc=rpc: rpc.get_result())
             for rpc in rpcs)
def SetMulti(self, mapping):
  rpcs = [db.put_async(
              PersistentObjectStoreItem.CreateItem(
                  self._namespace, key, value))
          for key, value in mapping.iteritems()]
  # If running the dev server, the futures don't complete until the server
  # is *quitting*. This is annoying. Flush now.
  if IsDevServer():
    for rpc in rpcs:
      rpc.wait()
  # Bind each |rpc| as a default argument; a bare closure over the loop
  # variable would make every Future resolve the last RPC in |rpcs|.
  return All(Future(callback=lambda rpc=rpc: rpc.get_result())
             for rpc in rpcs)
def find_file_with_name(name):
  '''Tries to find a file in the file system called |name| with one of the
  default extensions of this content provider. Returns a Future that
  resolves to the first such path that exists, or to None if none is found.
  '''
  paths = [name + ext for ext in self._default_extensions]

  def get_first_path_which_exists(existence):
    for exists, path in zip(existence, paths):
      if exists:
        return path
    return None

  return (All(self.file_system.Exists(path) for path in paths)
          .Then(get_first_path_which_exists))
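# Hypothetical usage: with self._default_extensions == ('.html', '.md'),
# find_file_with_name('intro') checks 'intro.html' and 'intro.md' and
# resolves to the first existing path, in extension order:
#   find_file_with_name('intro').Get()  # e.g. 'intro.html', or None.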
def Refresh(self):
  def get_api_schema(platform, api):
    return self._GetSchemaView(platform, api)

  def get_platform_schemas(platform):
    return All([get_api_schema(platform, api)
                for api in self._platform_bundle.GetAPIModels(platform)
                                                .GetNames()],
               except_pass=FileNotFoundError)

  return All([get_platform_schemas(platform)
              for platform in GetPlatforms()])
def Read(self, paths, skip_not_found=False):
  # Directory content is formatted in JSON in Gitiles as follows:
  #
  # {
  #   "id": "12a5464de48d2c46bc0b2dc78fafed75aab554fa",  # The tree ID.
  #   "entries": [
  #     {
  #       "mode": 33188,
  #       "type": "blob",
  #       "id": "ab971ca447bc4bce415ed4498369e00164d91cb6",  # File ID.
  #       "name": ".gitignore"
  #     },
  #     ...
  #   ]
  # }
  def list_dir(json_data):
    entries = _ParseGitilesJson(json_data).get('entries', [])
    return [e['name'] + ('/' if e['type'] == 'tree' else '')
            for e in entries]

  def fixup_url_format(path):
    # By default, Gitiles URLs display resources in HTML. To get resources
    # suitable for our consumption, a '?format=' string must be appended to
    # the URL. The format may be one of 'JSON' or 'TEXT' for directory or
    # text resources, respectively.
    return path + (_JSON_FORMAT if IsDirectory(path) else _TEXT_FORMAT)

  # A list of tuples of the form (path, Future).
  fetches = [(path, self._FetchAsync(fixup_url_format(path)))
             for path in paths]

  def parse_contents(results):
    value = {}
    for path, content in izip(paths, results):
      if content is None:
        continue
      # Gitiles encodes text content in base64 (see
      # http://tools.ietf.org/html/rfc4648 for info about base64).
      value[path] = (list_dir if IsDirectory(path) else b64decode)(content)
    return value

  return All(self._ResolveFetchContent(path, future, skip_not_found)
             for path, future in fetches).Then(parse_contents)
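# For illustration, assuming _JSON_FORMAT == '?format=JSON' and
# _TEXT_FORMAT == '?format=TEXT' (the two Gitiles format parameters the
# comment above describes):
#   fixup_url_format('docs/')         # => 'docs/?format=JSON'  (directory)
#   fixup_url_format('docs/api.json') # => 'docs/api.json?format=TEXT'
# TEXT responses arrive base64-encoded, hence the b64decode in
# parse_contents.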
def file_lister(root):
  res, root_stat = All((self._walk_cache.Get(root),
                        self.StatAsync(root))).Get()

  if res and res[2] == root_stat.version:
    dirs, files = res[0], res[1]
  else:
    # Wasn't cached, or not up to date.
    dirs, files = [], []
    for f in self.ReadSingle(root).Get():
      if IsDirectory(f):
        dirs.append(f)
      else:
        files.append(f)
    # Update the cache. This is a root -> (dirs, files, version) mapping.
    self._walk_cache.Set(root, (dirs, files, root_stat.version))
  return dirs, files
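# Sketch of the cached value's shape, with hypothetical contents:
#   self._walk_cache.Get('docs/').Get()
#   # => (['examples/'], ['index.html'], '4ab12c...')
# The stored version (res[2]) is compared against the current
# StatAsync(root) version to decide whether the listing is still fresh.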
def Get(self):
  object_store_creator = ObjectStoreCreator(start_empty=False)
  commit_tracker = CommitTracker(object_store_creator)

  def generate_response(result):
    commit_id, history = result
    history_log = ''.join('%s: %s<br>' % (entry.datetime, entry.commit_id)
                          for entry in reversed(history))
    return 'Current commit: %s<br><br>Most recent commits:<br>%s' % (
        commit_id, history_log)

  commit_name = self._request.path
  id_future = commit_tracker.Get(commit_name)
  history_future = commit_tracker.GetHistory(commit_name)
  return Response.Ok(
      All((id_future, history_future)).Then(generate_response).Get())
def SetMulti(self, mapping):
  self._cache.update(mapping)
  return All([object_store.SetMulti(mapping)
              for object_store in self._object_stores])
def load_features(dependency_features_list):
  futures = []
  for dependency_features, cache_type in zip(dependency_features_list,
                                             cache_types):
    if dependency_features is not None:
      # Get cached dependencies if possible. If it has been cached, all of
      # its features have been resolved, so the other fields are
      # unnecessary.
      futures.append(Future(value={'resolved': dependency_features}))
    else:
      futures.append(self._caches[cache_type].GetFeatures())

  def resolve(features):
    features_map = {}
    for cache_type, feature in zip(cache_types, features):
      # Copy down to the features_map level because the 'resolved' and
      # 'unresolved' dicts will be modified.
      features_map[cache_type] = dict(
          (c, copy(d)) for c, d in feature.iteritems())

    def has_unresolved():
      '''Determines whether there are any unresolved features left over in
      any of the categories in |dependencies|.
      '''
      return any(cache.get('unresolved')
                 for cache in features_map.itervalues())

    def get_unresolved():
      '''Returns a dictionary mapping each feature type to the names of its
      unresolved features.
      '''
      unresolved = {}
      for cache_type, cache in features_map.iteritems():
        if 'unresolved' not in cache:
          continue
        unresolved[cache_type] = cache['unresolved'].keys()
      return unresolved

    # Iterate until we can't resolve any more features. If dependencies are
    # multiple levels deep, it might take multiple passes to inherit data to
    # the topmost feature.
    any_resolved = True
    while has_unresolved() and any_resolved:
      any_resolved = False
      for cache_type, cache in features_map.iteritems():
        if 'unresolved' not in cache:
          continue
        to_remove = []
        for name, values in cache['unresolved'].iteritems():
          resolve_successful, feature = _ResolveFeature(
              name,
              values,
              cache['extra'].get(name, ()),
              self._platform,
              cache_type,
              features_map)
          if not resolve_successful:
            continue  # Try again on the next iteration of the while loop.
          any_resolved = True
          # When successfully resolved, remove it from the unresolved dict.
          # Add it to the resolved dict if it didn't get deleted.
          to_remove.append(name)
          if feature is not None:
            cache['resolved'][name] = feature
        for key in to_remove:
          del cache['unresolved'][key]

    # TODO(karandeepb): Add a test to ensure that all features are correctly
    # resolved.
    if has_unresolved():
      logging.error('Some features were left unresolved: %s' %
                    str(get_unresolved()))

    for cache_type, cache in features_map.iteritems():
      self._object_store.Set(cache_type, cache['resolved'])
    return features_map[features_type]['resolved']

  return All(futures).Then(resolve)
def Refresh(self):
  return All(self.GetAPIModels(platform).Refresh()
             for platform in self._platform_data)
def Refresh(self, path):
  platform, api = path.split('/')
  logging.info('Refreshing %s/%s' % (platform, api))
  future = self._GetImpl(platform, api)
  return All([future], except_pass=FileNotFoundError)
def testAll(self):
  def callback_with_value(value):
    return MockFunction(lambda: value)

  # Test a single value.
  callback = callback_with_value(42)
  future = All((Future(callback=callback),))
  self.assertTrue(*callback.CheckAndReset(0))
  self.assertEqual([42], future.Get())
  self.assertTrue(*callback.CheckAndReset(1))

  # Test multiple callbacks.
  callbacks = (callback_with_value(1),
               callback_with_value(2),
               callback_with_value(3))
  future = All(Future(callback=callback) for callback in callbacks)
  for callback in callbacks:
    self.assertTrue(*callback.CheckAndReset(0))
  self.assertEqual([1, 2, 3], future.Get())
  for callback in callbacks:
    self.assertTrue(*callback.CheckAndReset(1))

  # Test throwing an error.
  def throws_error():
    raise ValueError()
  callbacks = (callback_with_value(1),
               callback_with_value(2),
               MockFunction(throws_error))
  future = All(Future(callback=callback) for callback in callbacks)
  for callback in callbacks:
    self.assertTrue(*callback.CheckAndReset(0))
  self.assertRaises(ValueError, future.Get)
  for callback in callbacks:
    # Can't check that the callbacks were actually run because in theory
    # the Futures can be resolved in any order.
    callback.CheckAndReset(0)

  # Test throwing an error with except_pass.
  future = All((Future(callback=callback) for callback in callbacks),
               except_pass=ValueError)
  for callback in callbacks:
    self.assertTrue(*callback.CheckAndReset(0))
  self.assertEqual([1, 2, None], future.Get())
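# A minimal sketch of an All combinator consistent with what this test
# exercises; the real implementation lives alongside Future and may differ.
# It materializes the input Futures eagerly (so constructing All runs no
# callbacks), resolves them in order on Get(), propagates the first
# exception unless it is an instance of |except_pass|, and substitutes None
# for passed-over failures, optionally logging them when |except_pass_log|
# is set. |Future| here is this module's lazy Future.
import logging
import traceback

def All(futures, except_pass=None, except_pass_log=False):
  futures = list(futures)
  def resolve():
    resolved = []
    for future in futures:
      try:
        resolved.append(future.Get())
      except Exception as e:
        # Re-raise anything the caller didn't explicitly ask to pass over.
        if except_pass is None or not isinstance(e, except_pass):
          raise
        if except_pass_log:
          logging.error(traceback.format_exc())
        resolved.append(None)
    return resolved
  return Future(callback=resolve)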
def Cron(self):
  return All([self._GetImpl(platform) for platform in GetPlatforms()])
def Cron(self):
  return All(self.GetAPIModels(platform).Cron()
             for platform in self._platform_data)
def get_platform_schemas(platform):
  return All([get_api_schema(platform, api)
              for api in self._platform_bundle.GetAPIModels(platform)
                                              .GetNames()],
             except_pass=FileNotFoundError)
def Refresh(self):
  return All(self._GetImpl(platform) for platform in GetPlatforms())
def Cron(self):
  futures = [self.GetModel(name) for name in self.GetNames()]
  return All(futures, except_pass=(FileNotFoundError, ValueError))