def lookup_redirect(url):
  # |seen_redirects| is a set maintained in the enclosing scope; it guards
  # against cyclic redirect rules.
  sub_url = url
  for sub_url, _ in Segment(url):
    for base, filename in Segment(sub_url):
      try:
        redirects = self._cache.GetFromFile(posixpath.normpath(
            posixpath.join(base, 'redirects.json'))).Get()
      except FileNotFoundError:
        continue
      redirect = redirects.get(posixpath.join(filename, '...'))
      if redirect is None:
        continue
      redirect = Join(base, redirect.rstrip('...'))
      # Avoid infinite redirection loops by breaking if seen before.
      if redirect in seen_redirects:
        break
      seen_redirects.add(redirect)
      return lookup_redirect(
          Join(redirect, posixpath.relpath(url, sub_url)))
  return url
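# A minimal, self-contained sketch of the wildcard ('...') resolution above,
# with the cached redirects.json replaced by an in-memory dict. The rule, URL,
# and target are hypothetical, and posixpath.join stands in for the Join
# helper. Note that rstrip('...') strips trailing '.' characters rather than
# the literal suffix, which is why it works here.
import posixpath

rules = {'tutorials/...': 'extensions/tutorials/...'}
url, sub_url = 'apps/tutorials/intro', 'apps/tutorials'
base, filename = 'apps', 'tutorials'

redirect = rules.get(posixpath.join(filename, '...'))  # 'tutorials/...'
target = posixpath.join(redirect.rstrip('...'),
                        posixpath.relpath(url, sub_url))
assert target == 'extensions/tutorials/intro'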
def _RedirectFromConfig(self, url):
  ''' Look up redirects.json file in the directory hierarchy of |url|.
  Directory-level redirects occur first, followed by the specific file
  redirects. Returns the URL to the redirect, if any exist, or None.
  '''
  dirname, filename = posixpath.split(url)
  redirected_dirname = self._RedirectDirectory(dirname)

  # Set up default return value.
  default_redirect = None
  if redirected_dirname != dirname:
    default_redirect = posixpath.normpath(Join(redirected_dirname, filename))

  try:
    rules = self._cache.GetFromFile(
        posixpath.normpath(Join(redirected_dirname,
                                'redirects.json'))).Get()
  except FileNotFoundError:
    return default_redirect

  redirect = rules.get(filename)
  if redirect is None:
    return default_redirect
  if (redirect.startswith('/') or
      urlsplit(redirect).scheme in ('http', 'https')):
    return redirect

  return posixpath.normpath(Join(redirected_dirname, redirect))
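# A standalone restatement of the tail of _RedirectFromConfig: absolute paths
# and full http(s) URLs pass through verbatim, anything else resolves relative
# to the (possibly redirected) directory. This assumes Join wraps
# posixpath.join; the rule values below are made up for illustration.
import posixpath
from urllib.parse import urlsplit  # urlparse.urlsplit on Python 2

def classify(redirect, redirected_dirname):
  if (redirect.startswith('/') or
      urlsplit(redirect).scheme in ('http', 'https')):
    return redirect
  return posixpath.normpath(posixpath.join(redirected_dirname, redirect))

assert classify('overview', 'extensions') == 'extensions/overview'
assert classify('/apps/about_apps', 'extensions') == '/apps/about_apps'
assert classify('https://example.com/x', 'ext') == 'https://example.com/x'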
def __init__(self, file_system, compiled_fs_factory, object_store_creator,
             platform):
  def create_features_cache(features_type, feature_file, *extra_paths):
    return _FeaturesCache(
        file_system,
        compiled_fs_factory,
        [Join(path, feature_file) for path in API_PATHS],
        extra_paths,
        self._platform,
        features_type)

  if platform not in GetExtensionTypes():
    self._platform = PlatformToExtensionType(platform)
  else:
    self._platform = platform

  self._caches = {
    'api': create_features_cache('api', _API_FEATURES),
    'manifest': create_features_cache('manifest', _MANIFEST_FEATURES,
                                      Join(JSON_TEMPLATES, 'manifest.json')),
    'permission': create_features_cache(
        'permission', _PERMISSION_FEATURES,
        Join(JSON_TEMPLATES, 'permissions.json'))
  }
  # Namespace the object store by the file system ID because this class is
  # used by the availability finder cross-channel.
  self._object_store = object_store_creator.Create(
      _FeaturesCache,
      category=StringIdentity(file_system.GetIdentity(), self._platform))
def collect(api_owners):
  if api_owners is not None:
    return api_owners

  # Get API owners from every OWNERS file that exists.
  api_owners = []
  for root in BROWSER_API_PATHS:
    for base, dirs, _ in self._host_fs.Walk(root, depth=1):
      for dir_ in dirs:
        owners_file = Join(root, base, dir_, _OWNERS)
        api_owners.append(
            self._owners_fs.GetFromFile(owners_file, skip_not_found=True))

  # Add an entry for the core extensions/apps owners.
  def fix_core_owners(entry):
    entry['apiName'] = _CORE_OWNERS
    entry['id'] = 'core'
    return entry

  owners_file = Join(BROWSER_CHROME_EXTENSIONS, _OWNERS)
  api_owners.append(self._owners_fs.GetFromFile(owners_file).Then(
      fix_core_owners))

  def sort_and_cache(api_owners):
    api_owners.sort(key=itemgetter('apiName'))
    self._cache.Set('api_owners', api_owners)
    return api_owners

  return All(api_owners).Then(sort_and_cache)
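# The Future pipeline above in miniature: All() joins a list of Futures into
# a single Future of a list, and Then() transforms the joined result. This is
# a hypothetical stub of that interface for illustration only, not the
# project's actual Future implementation.
class Future(object):
  def __init__(self, value=None, callback=None):
    self._value, self._callback = value, callback

  def Get(self):
    if self._callback is not None:
      self._value, self._callback = self._callback(), None
    return self._value

  def Then(self, fn):
    return Future(callback=lambda: fn(self.Get()))

def All(futures):
  return Future(callback=lambda: [f.Get() for f in futures])

owners = [Future(value={'apiName': 'tabs'}),
          Future(value={'apiName': 'alarms'})]
names = All(owners).Then(
    lambda entries: sorted(e['apiName'] for e in entries)).Get()
assert names == ['alarms', 'tabs']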
def Cron(self):
  futures = [self._path_canonicalizer.Cron()]
  for root, _, files in self.file_system.Walk(''):
    for f in files:
      futures.append(self.GetContentAndType(Join(root, f)))
      # Also cache the extension-less version of the file if needed.
      base, ext = posixpath.splitext(f)
      if f != SITE_VERIFICATION_FILE and ext in self._default_extensions:
        futures.append(self.GetContentAndType(Join(root, base)))
    # TODO(kalman): Cache .zip files for each directory (if supported).
  return Future(callback=lambda: [f.Get() for f in futures])
def Refresh(self):
  futures = [self._path_canonicalizer.Refresh()]
  for root, _, files in self.file_system.Walk(''):
    for f in files:
      futures.append(self.GetContentAndType(Join(root, f)))
      # Also cache the extension-less version of the file if needed.
      base, ext = posixpath.splitext(f)
      if f != SITE_VERIFICATION_FILE and ext in self._default_extensions:
        futures.append(self.GetContentAndType(Join(root, base)))
    # TODO(kalman): Cache .zip files for each directory (if supported).
  return All(futures, except_pass=Exception, except_pass_log=True)
def _CreateStatInfo(bucket, path):
  full_path = Join(bucket, path)
  last_commit_file = Join(bucket, LAST_COMMIT_HASH_FILENAME)
  try:
    last_commit = _ReadFile(last_commit_file)
    if IsDirectory(full_path):
      child_versions = dict((filename, last_commit)
                            for filename in _ListDir(full_path))
    else:
      child_versions = None
    return StatInfo(last_commit, child_versions)
  except (TypeError, errors.Error):
    raise FileNotFoundError('cloudstorage.stat failed for %s: %s' %
                            (path, traceback.format_exc()))
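# The shape _CreateStatInfo produces, under made-up values: every child of a
# directory reports the same bucket-wide last-commit hash, since versioning
# here is tracked per bucket rather than per file. 'deadbeef' and the
# filenames are hypothetical.
last_commit = 'deadbeef'
listing = ['manifest.html', 'tabs.html']
child_versions = dict((filename, last_commit) for filename in listing)
assert child_versions == {'manifest.html': 'deadbeef',
                          'tabs.html': 'deadbeef'}
# i.e. StatInfo('deadbeef', child_versions) for a directory, or
# StatInfo('deadbeef', None) for a single file.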
def GetContentAndType(self, path):
  '''Returns the ContentAndType of the file at |path|.
  '''
  AssertIsValid(path)
  base, ext = posixpath.splitext(path)

  # Check for a zip file first, if zip is enabled.
  if self._directory_zipper and ext == '.zip':
    zip_future = self._directory_zipper.Zip(ToDirectory(base))
    return Future(callback=lambda: ContentAndType(zip_future.Get(),
                                                  'application/zip',
                                                  None))

  # If there is no file extension, look for a file with one of the default
  # extensions. If one cannot be found, check if the path is a directory.
  # If it is, then check for an index file with one of the default
  # extensions.
  if not ext:
    new_path = self._AddExt(path)
    # Add a trailing / to check if it is a directory and not a file with
    # no extension.
    if (new_path is None and
        self.file_system.Exists(ToDirectory(path)).Get()):
      new_path = self._AddExt(Join(path, 'index'))
      # If an index file wasn't found in this directory then we're never
      # going to find a file.
      if new_path is None:
        return FileNotFoundError.RaiseInFuture('"%s" is a directory' % path)
    if new_path is not None:
      path = new_path

  return self._content_cache.GetFromFile(path)
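# A standalone sketch of the extension-less lookup order above: first try
# path + each default extension, then treat path as a directory and try
# path/index + each extension. The extension list and file set below are
# hypothetical stand-ins for self._default_extensions and the file system.
import posixpath

DEFAULT_EXTENSIONS = ('.html', '.md')
EXISTING = set(['docs/overview.html', 'docs/guide/index.md'])

def add_ext(path):
  for ext in DEFAULT_EXTENSIONS:
    if path + ext in EXISTING:
      return path + ext
  return None

assert add_ext('docs/overview') == 'docs/overview.html'
assert add_ext(posixpath.join('docs/guide', 'index')) == 'docs/guide/index.md'
assert add_ext('docs/missing') is None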
def Refresh(self):
  ''' Load files during a cron run.
  '''
  futures = []
  for root, dirs, files in self._file_system.Walk(''):
    if 'redirects.json' in files:
      futures.append(self._cache.GetFromFile(Join(root, 'redirects.json')))
  return All(futures)
def resolve():
  try:
    result = {}
    for path in paths:
      full_path = Join(self._bucket, path)
      logging.debug('gcs: requested path "%s", reading "%s"' %
                    (path, full_path))
      if IsDirectory(path):
        result[path] = _ListDir(full_path)
      else:
        result[path] = _ReadFile(full_path)
    return result
  except errors.AuthorizationError:
    self._warnAboutAuthError()
    raise
def Cron(self):
  futures = [('<path_canonicalizer>',  # semi-arbitrary string since there is
                                       # no path associated with this Future.
              self._path_canonicalizer.Cron())]
  for root, _, files in self.file_system.Walk(''):
    for f in files:
      futures.append((Join(root, f),
                      self.GetContentAndType(Join(root, f))))
      # Also cache the extension-less version of the file if needed.
      base, ext = posixpath.splitext(f)
      if f != SITE_VERIFICATION_FILE and ext in self._default_extensions:
        futures.append((Join(root, base),
                        self.GetContentAndType(Join(root, base))))
    # TODO(kalman): Cache .zip files for each directory (if supported).

  def resolve():
    for label, future in futures:
      try:
        future.Get()
      except:
        logging.error('%s: %s' % (label, traceback.format_exc()))

  return Future(callback=resolve)
def get_index_if_directory_exists(directory_exists):
  if not directory_exists:
    return None
  return find_file_with_name(Join(path, 'index'))
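# A runnable sketch of this continuation in isolation, with hypothetical
# stand-ins for find_file_with_name and |path| (both come from the enclosing
# scope in the original); the callback receives the boolean result of a
# file_system.Exists() Future, in the Then() style used elsewhere here.
import posixpath

path = 'docs/guide'                                 # hypothetical
find_file_with_name = lambda name: name + '.html'   # hypothetical stub

def get_index_if_directory_exists(directory_exists):
  if not directory_exists:
    return None
  return find_file_with_name(posixpath.join(path, 'index'))

assert get_index_if_directory_exists(True) == 'docs/guide/index.html'
assert get_index_if_directory_exists(False) is None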