def _CreateMemcacheFileSystem(branch, branch_memcache):
  '''Returns a memcache-backed SubversionFileSystem rooted at the extensions
  directory of |branch|, caching into |branch_memcache|.
  '''
  svn_url = '%s/%s' % (_GetURLFromBranch(branch), EXTENSIONS_PATH)
  # Stats are served from ViewVC rather than the raw SVN server.
  viewvc_url = svn_url.replace(url_constants.SVN_URL, url_constants.VIEWVC_URL)
  svn_file_system = SubversionFileSystem(AppEngineUrlFetcher(svn_url),
                                         AppEngineUrlFetcher(viewvc_url))
  return MemcacheFileSystem(svn_file_system, branch_memcache)
def CreateOnline(channel):
  '''Creates an online server instance, meaning that both local and
  subversion/github resources are queried.
  '''
  # (Fixed docstring typo: "Creates/creates".)
  branch_utility = ServerInstance._GetOrCreateBranchUtility()
  branch = branch_utility.GetBranchNumberForChannelName(channel)
  if branch == 'trunk':
    svn_url = '/'.join((url_constants.SVN_TRUNK_URL,
                        'src',
                        svn_constants.EXTENSIONS_PATH))
  else:
    svn_url = '/'.join((url_constants.SVN_BRANCH_URL,
                        branch,
                        'src',
                        svn_constants.EXTENSIONS_PATH))
  # Stat requests go through ViewVC rather than the raw SVN server.
  viewvc_url = svn_url.replace(url_constants.SVN_URL,
                               url_constants.VIEWVC_URL)
  object_store_creator_factory = ObjectStoreCreator.Factory(
      GetAppVersion(), branch, start_empty=True)
  svn_file_system = CachingFileSystem(
      SubversionFileSystem(AppEngineUrlFetcher(svn_url),
                           AppEngineUrlFetcher(viewvc_url)),
      object_store_creator_factory)
  return ServerInstance(channel,
                        object_store_creator_factory,
                        svn_file_system,
                        ServerInstance._GetOrCreateGithubFileSystem())
def __init__(self, url, blobstore, object_store_creator):
  '''Initializes the file system and kicks off the initial zip fetch.'''
  # If we key the password store on the app version then the whole advantage
  # of having it in the first place is greatly lessened (likewise it should
  # always start populated).
  password_store = object_store_creator.Create(GithubFileSystem,
                                               app_version=None,
                                               category='password',
                                               start_empty=False)
  if USERNAME is None:
    # No hard-coded credentials; pull whatever is in the password store.
    credentials = password_store.GetMulti(('username', 'password')).Get()
    self._username = credentials.get('username')
    self._password = credentials.get('password')
  else:
    # Hard-coded credentials win and are persisted for future instances.
    password_store.SetMulti({'username': USERNAME, 'password': PASSWORD})
    self._username = USERNAME
    self._password = PASSWORD
  self._url = url
  self._fetcher = AppEngineUrlFetcher(url)
  self._blobstore = blobstore
  self._stat_object_store = object_store_creator.Create(GithubFileSystem)
  self._version = None
  self._GetZip(self.Stat(ZIP_KEY).version)
def Create(branch='trunk', revision=None):
  '''Builds a SubversionFileSystem for |branch| at |revision| (or HEAD).'''
  svn_path = 'trunk/src' if branch == 'trunk' else 'branches/%s/src' % branch
  return SubversionFileSystem(
      AppEngineUrlFetcher('%s/%s' % (url_constants.SVN_URL, svn_path)),
      AppEngineUrlFetcher('%s/%s' % (url_constants.VIEWVC_URL, svn_path)),
      svn_path,
      revision=revision)
def setUp(self):
  # Route all URL fetches through the canned fixtures next to the tests.
  ConfigureFakeFetchers(os.path.join(sys.path[0], os.pardir))
  self._base_path = os.path.join(sys.path[0],
                                 'test_data',
                                 'github_file_system')
  self._file_system = GithubFileSystem(
      AppEngineUrlFetcher(url_constants.GITHUB_URL),
      AppEngineBlobstore())
def CreateServerInstanceForChannel(self, channel):
  '''Builds a ServerInstance for |channel| with this patch layered on top of
  the channel's branch content.
  '''
  channel_store_creator = ObjectStoreCreator(channel, start_empty=False)
  # TODO(fj): Use OfflineFileSystem here once all json/idl files in api/
  # are pulled into data store by cron jobs.
  branch_file_system = CachingFileSystem(
      self._delegate.CreateHostFileSystemForBranch(channel),
      channel_store_creator)
  branch_compiled_fs_factory = CompiledFileSystem.Factory(
      branch_file_system, channel_store_creator)
  patch_store_creator = ObjectStoreCreator('trunk@%s' % self._issue,
                                           start_empty=False)
  rietveld_patcher = CachingRietveldPatcher(
      RietveldPatcher(svn_constants.EXTENSIONS_PATH,
                      self._issue,
                      AppEngineUrlFetcher(url_constants.CODEREVIEW_SERVER)),
      patch_store_creator)
  patched_file_system = PatchedFileSystem(branch_file_system,
                                          rietveld_patcher)
  patched_compiled_fs_factory = CompiledFileSystem.Factory(
      patched_file_system, patch_store_creator)
  # Prefer patched content, falling back to the unpatched branch tree.
  compiled_fs_factory = ChainedCompiledFileSystem.Factory([
      (patched_compiled_fs_factory, patched_file_system),
      (branch_compiled_fs_factory, branch_file_system),
  ])
  return ServerInstance(channel,
                        patch_store_creator,
                        patched_file_system,
                        self._delegate.CreateAppSamplesFileSystem(
                            channel_store_creator),
                        '/_patch/%s' % self._issue,
                        compiled_fs_factory)
def CreateServerInstance(self):
  '''Builds a ServerInstance whose host file system has the Rietveld patch
  for |self._issue| applied on top of trunk.
  '''
  store_creator = ObjectStoreCreator(start_empty=False)
  branch_utility = self._delegate.CreateBranchUtility(store_creator)
  host_file_system_creator = self._delegate.CreateHostFileSystemCreator(
      store_creator)
  # offline=False because a patch can rely on files that are already in SVN
  # repository but not yet pulled into data store by cron jobs (a typical
  # example is to add documentation for an existing API).
  unpatched_fs = CachingFileSystem(
      host_file_system_creator.Create(offline=False), store_creator)
  unpatched_compiled_fs_factory = CompiledFileSystem.Factory(
      unpatched_fs, store_creator)
  rietveld_patcher = CachingRietveldPatcher(
      RietveldPatcher(svn_constants.EXTENSIONS_PATH,
                      self._issue,
                      AppEngineUrlFetcher(url_constants.CODEREVIEW_SERVER)),
      store_creator)
  patched_fs = PatchedFileSystem(unpatched_fs, rietveld_patcher)
  patched_compiled_fs_factory = CompiledFileSystem.Factory(
      patched_fs, store_creator)
  # Prefer patched content, falling back to the unpatched tree.
  compiled_fs_factory = ChainedCompiledFileSystem.Factory([
      (patched_compiled_fs_factory, patched_fs),
      (unpatched_compiled_fs_factory, unpatched_fs),
  ])
  return ServerInstance(store_creator,
                        patched_fs,
                        self._delegate.CreateAppSamplesFileSystem(
                            store_creator),
                        '/_patch/%s' % self._issue,
                        compiled_fs_factory,
                        branch_utility,
                        host_file_system_creator)
def Create(branch='master', commit=None):
  '''Creates a GitilesFileSystem rooted at |commit| if given, otherwise at
  the tip of |branch|.
  '''
  if commit:
    base_url = '%s/%s' % (GITILES_BASE, commit)
  elif branch == 'master':
    # Fixed: was |branch is 'master'|, an identity comparison which only
    # happens to work for interned string literals, not strings computed by
    # callers at runtime.
    base_url = '%s/master' % GITILES_BASE
  else:
    base_url = '%s/%s' % (GITILES_BRANCH_BASE, branch)
  return GitilesFileSystem(AppEngineUrlFetcher(), base_url, branch, commit)
def _GetOrCreateGithubFileSystem():
  '''Lazily creates the shared github file system singleton.'''
  if ServerInstance.github_file_system is None:
    # Initialising github is pointless if samples are disabled, since it's
    # only used for apps samples; substitute an empty file system instead.
    if _IsSamplesDisabled():
      file_system = EmptyDirFileSystem()
    else:
      file_system = GithubFileSystem(
          AppEngineUrlFetcher(url_constants.GITHUB_URL),
          AppEngineBlobstore())
    ServerInstance.github_file_system = file_system
  return ServerInstance.github_file_system
def CreateServerInstance(self):
  '''Builds a ServerInstance serving trunk content with the Rietveld patch
  for |self._issue| applied.
  '''
  # start_empty=False because a patch can rely on files that are already in
  # SVN repository but not yet pulled into data store by cron jobs (a typical
  # example is to add documentation for an existing API).
  store_creator = ObjectStoreCreator(start_empty=False)
  trunk_fs = self._delegate.CreateHostFileSystemProvider(
      store_creator).GetTrunk()
  rietveld_patcher = CachingRietveldPatcher(
      RietveldPatcher(self._issue,
                      AppEngineUrlFetcher(url_constants.CODEREVIEW_SERVER)),
      store_creator)
  patched_fs = PatchedFileSystem(trunk_fs, rietveld_patcher)
  patched_host_fs_provider = (
      self._delegate.CreateHostFileSystemProvider(
          store_creator,
          # The patched file system needs to be online otherwise it'd be
          # impossible to add files in the patches.
          offline=False,
          # The trunk file system for this creator should be the patched one.
          default_trunk_instance=patched_fs))
  compiled_fs_factory = ChainedCompiledFileSystem.Factory(
      [trunk_fs], store_creator)
  branch_utility = self._delegate.CreateBranchUtility(store_creator)
  server_instance = ServerInstance(
      store_creator,
      compiled_fs_factory,
      branch_utility,
      patched_host_fs_provider,
      self._delegate.CreateGithubFileSystemProvider(store_creator),
      CloudStorageFileSystemProvider(store_creator),
      base_path='/_patch/%s/' % self._issue)
  # HACK: if content_providers.json changes in this patch then the cron needs
  # to be re-run to pull in the new configuration.
  _, _, modified = rietveld_patcher.GetPatchedFiles()
  if CONTENT_PROVIDERS in modified:
    server_instance.content_providers.Cron().Get()
  return server_instance
def Create(branch='master', commit=None):
  '''Creates a GitilesFileSystem rooted at |commit| if given, otherwise at
  the tip of |branch|.
  '''
  token, _ = app_identity.get_access_token(GITILES_OAUTH2_SCOPE)
  # Only use the forced-auth path prefix when we actually have a token.
  path_prefix = '' if token is None else _AUTH_PATH_PREFIX
  if commit:
    base_url = '%s%s/%s/%s' % (
        GITILES_BASE, path_prefix, GITILES_SRC_ROOT, commit)
  elif branch == 'master':
    # Fixed: was |branch is 'master'|, an identity comparison which only
    # happens to work for interned string literals, not strings computed by
    # callers at runtime.
    base_url = '%s%s/%s/master' % (
        GITILES_BASE, path_prefix, GITILES_SRC_ROOT)
  else:
    base_url = '%s%s/%s/%s/%s' % (
        GITILES_BASE, path_prefix, GITILES_SRC_ROOT,
        GITILES_BRANCHES_PATH, branch)
  return GitilesFileSystem(AppEngineUrlFetcher(), base_url, branch, commit)
def Create(cls, branch='master', commit=None):
  '''Creates a GitilesFileSystem rooted at |commit| if given, otherwise at
  the tip of |branch|.
  '''
  token, _ = app_identity.get_access_token(GITILES_OAUTH2_SCOPE)
  # Log the access token (once per token) so that it can be sneakily re-used
  # in development.
  if token not in cls._logged_tokens:
    logging.info('Got token %s for scope %s' % (token, GITILES_OAUTH2_SCOPE))
    cls._logged_tokens.add(token)
  # Only include forced-auth (/a/) in the Gitiles URL if we have a token and
  # this is not the development server.
  path_prefix = ('' if token is None or IsDevServer()
                 else _AUTH_PATH_PREFIX)
  if commit:
    base_url = '%s%s/%s/%s' % (
        GITILES_BASE, path_prefix, GITILES_SRC_ROOT, commit)
  elif branch == 'master':
    # Fixed: was |branch is 'master'|, an identity comparison which only
    # happens to work for interned string literals, not strings computed by
    # callers at runtime.
    base_url = '%s%s/%s/master' % (
        GITILES_BASE, path_prefix, GITILES_SRC_ROOT)
  else:
    base_url = '%s%s/%s/%s/%s' % (
        GITILES_BASE, path_prefix, GITILES_SRC_ROOT,
        GITILES_BRANCHES_PATH, branch)
  return GitilesFileSystem(AppEngineUrlFetcher(), base_url, branch, commit)
# The branch that the server will default to when no branch is specified in the # URL. This is necessary because it is not possible to pass flags to the script # handler. # Production settings: DEFAULT_BRANCHES = {'extensions': 'stable', 'apps': 'trunk'} # Dev settings: # DEFAULT_BRANCHES = { 'extensions': 'local', 'apps': 'local' } # Increment this version to force the server to reload all pages in the first # cron job that is run. _VERSION = 0 BRANCH_UTILITY_MEMCACHE = InMemoryObjectStore('branch_utility') BRANCH_UTILITY = BranchUtility(url_constants.OMAHA_PROXY_URL, DEFAULT_BRANCHES, AppEngineUrlFetcher(None), BRANCH_UTILITY_MEMCACHE) GITHUB_MEMCACHE = InMemoryObjectStore('github') GITHUB_FILE_SYSTEM = GithubFileSystem( AppEngineUrlFetcher(url_constants.GITHUB_URL), GITHUB_MEMCACHE, AppEngineBlobstore()) GITHUB_COMPILED_FILE_SYSTEM = CompiledFileSystem.Factory( GITHUB_FILE_SYSTEM, GITHUB_MEMCACHE) EXTENSIONS_PATH = 'chrome/common/extensions' DOCS_PATH = 'docs' API_PATH = 'api' TEMPLATE_PATH = DOCS_PATH + '/templates' INTRO_PATH = TEMPLATE_PATH + '/intros' ARTICLE_PATH = TEMPLATE_PATH + '/articles'
def _GetOrCreateGithubFileSystem():
  '''Lazily creates the shared github file system singleton.'''
  if ServerInstance.github_file_system is None:
    fetcher = AppEngineUrlFetcher(url_constants.GITHUB_URL)
    ServerInstance.github_file_system = GithubFileSystem(
        fetcher, AppEngineBlobstore())
  return ServerInstance.github_file_system
from subversion_file_system import SubversionFileSystem
from template_data_source import TemplateDataSource
from third_party.json_schema_compiler.model import UnixName
import url_constants

# The branch that the server will default to when no branch is specified in the
# URL. This is necessary because it is not possible to pass flags to the script
# handler.
# Production settings:
DEFAULT_BRANCHES = {'extensions': 'stable', 'apps': 'trunk'}
# Dev settings:
# DEFAULT_BRANCHES = { 'extensions': 'local', 'apps': 'local' }

# Shared branch-name lookup utility, backed by omahaproxy via an in-memory
# object store.
BRANCH_UTILITY_MEMCACHE = InMemoryObjectStore('branch_utility')
BRANCH_UTILITY = BranchUtility(url_constants.OMAHA_PROXY_URL,
                               DEFAULT_BRANCHES,
                               AppEngineUrlFetcher(None),
                               BRANCH_UTILITY_MEMCACHE)

# Github-backed file system (used for apps samples) and its compiled wrapper,
# both caching through the same in-memory store.
GITHUB_MEMCACHE = InMemoryObjectStore('github')
GITHUB_FILE_SYSTEM = GithubFileSystem(
    AppEngineUrlFetcher(url_constants.GITHUB_URL),
    GITHUB_MEMCACHE,
    AppEngineBlobstore())
GITHUB_COMPILED_FILE_SYSTEM = CompiledFileSystem.Factory(
    GITHUB_FILE_SYSTEM,
    GITHUB_MEMCACHE)

# Relative paths within the source checkout where docs content lives.
EXTENSIONS_PATH = 'chrome/common/extensions'
DOCS_PATH = 'docs'
API_PATH = 'api'
TEMPLATE_PATH = DOCS_PATH + '/templates'
INTRO_PATH = TEMPLATE_PATH + '/intros'
ARTICLE_PATH = TEMPLATE_PATH + '/articles'
class GithubFileSystem(FileSystem):
  '''A FileSystem view of a github repository, served out of a single zip
  snapshot of the repo that is cached in blobstore and keyed by commit SHA.
  '''

  @staticmethod
  def CreateChromeAppsSamples(object_store_creator):
    # Convenience factory bound to the chrome-app-samples repository.
    return GithubFileSystem(
        '%s/GoogleChrome/chrome-app-samples' % url_constants.GITHUB_REPOS,
        AppEngineBlobstore(),
        object_store_creator)

  def __init__(self, url, blobstore, object_store_creator):
    # If we key the password store on the app version then the whole advantage
    # of having it in the first place is greatly lessened (likewise it should
    # always start populated).
    password_store = object_store_creator.Create(
        GithubFileSystem,
        app_version=None,
        category='password',
        start_empty=False)
    if USERNAME is None:
      # No hard-coded credentials; use whatever the password store holds.
      password_data = password_store.GetMulti(('username', 'password')).Get()
      self._username, self._password = (password_data.get('username'),
                                        password_data.get('password'))
    else:
      # Hard-coded credentials win, and are persisted for future instances.
      password_store.SetMulti({'username': USERNAME, 'password': PASSWORD})
      self._username, self._password = (USERNAME, PASSWORD)
    self._url = url
    self._fetcher = AppEngineUrlFetcher(url)
    self._blobstore = blobstore
    self._stat_object_store = object_store_creator.Create(GithubFileSystem)
    self._version = None
    # Eagerly load the zip for the current HEAD version.
    self._GetZip(self.Stat(ZIP_KEY).version)

  def _GetZip(self, version):
    '''Sets self._zip_file to a Future of the repo zip for |version|, from
    blobstore if cached there, otherwise via an async github fetch.
    '''
    try:
      blob = self._blobstore.Get(_MakeBlobstoreKey(version), BLOBSTORE_GITHUB)
    except blobstore.BlobNotFoundError:
      self._zip_file = Future(value=None)
      return
    if blob is not None:
      try:
        self._zip_file = Future(value=ZipFile(StringIO(blob)))
      except BadZipfile as e:
        # Corrupt cached blob: drop it so the next fetch re-downloads.
        self._blobstore.Delete(_MakeBlobstoreKey(version), BLOBSTORE_GITHUB)
        logging.error('Bad github zip file: %s' % e)
        self._zip_file = Future(value=None)
    else:
      # Not cached; fetch asynchronously and evict the previous version.
      self._zip_file = Future(
          callback=_GetAsyncFetchCallback(self._fetcher,
                                          self._username,
                                          self._password,
                                          self._blobstore,
                                          version,
                                          key_to_delete=self._version))
    self._version = version

  def _ReadFile(self, path):
    '''Returns the contents of |path| within the zip, or '' on any error.'''
    try:
      zip_file = self._zip_file.Get()
    except Exception as e:
      logging.error('Github ReadFile error: %s' % e)
      return ''
    if zip_file is None:
      logging.error('Bad github zip file.')
      return ''
    # The first zip entry is the top-level repo directory; all real paths are
    # nested beneath it.
    prefix = zip_file.namelist()[0]
    return zip_file.read(prefix + path)

  def _ListDir(self, path):
    '''Returns the names directly inside directory |path|, or [] on error.'''
    try:
      zip_file = self._zip_file.Get()
    except Exception as e:
      logging.error('Github ListDir error: %s' % e)
      return []
    if zip_file is None:
      logging.error('Bad github zip file.')
      return []
    filenames = zip_file.namelist()
    # Take out parent directory name (GoogleChrome-chrome-app-samples-c78a30f)
    filenames = [f[len(filenames[0]):] for f in filenames]
    # Remove the path of the directory we're listing from the filenames.
    filenames = [f[len(path):] for f in filenames
                 if f != path and f.startswith(path)]
    # Remove all files not directly in this directory.
    return [f for f in filenames if f[:-1].count('/') == 0]

  def Read(self, paths, skip_not_found=False):
    '''Reads each path in |paths|, refreshing the zip first if github has a
    newer version. Directories map to name lists, files to contents.
    '''
    version = self.Stat(ZIP_KEY).version
    if version != self._version:
      self._GetZip(version)
    result = {}
    for path in paths:
      if IsDirectory(path):
        result[path] = self._ListDir(path)
      else:
        result[path] = self._ReadFile(path)
    return Future(value=result)

  def _DefaultStat(self, path):
    '''Fallback StatInfo (version 0) used when github cannot be reached.'''
    version = 0
    # TODO(kalman): we should replace all of this by wrapping the
    # GithubFileSystem in a CachingFileSystem. A lot of work has been put into
    # CFS to be robust, and GFS is missing out.
    # For example: the following line is wrong, but it could be moot.
    self._stat_object_store.Set(path, version)
    return StatInfo(version)

  def Stat(self, path):
    '''Returns a StatInfo whose version is the SHA of the repo's HEAD commit,
    consulting the stat object store before hitting github.
    '''
    version = self._stat_object_store.Get(path).Get()
    if version is not None:
      return StatInfo(version)
    try:
      result = self._fetcher.Fetch('commits/HEAD',
                                   username=USERNAME,
                                   password=PASSWORD)
    except urlfetch.DownloadError as e:
      logging.warning('GithubFileSystem Stat: %s' % e)
      return self._DefaultStat(path)
    # Check if Github authentication failed.
    if result.status_code == 401:
      logging.warning('Github authentication failed for %s, falling back to '
                      'unauthenticated.' % USERNAME)
      try:
        result = self._fetcher.Fetch('commits/HEAD')
      except urlfetch.DownloadError as e:
        logging.warning('GithubFileSystem Stat: %s' % e)
        return self._DefaultStat(path)
    # Parse response JSON - but sometimes github gives us invalid JSON.
    try:
      version = json.loads(result.content)['sha']
      self._stat_object_store.Set(path, version)
      return StatInfo(version)
    except StandardError as e:
      logging.warning(
          ('%s: got invalid or unexpected JSON from github. Response status ' +
           'was %s, content %s') % (e, result.status_code, result.content))
      return self._DefaultStat(path)

  def GetIdentity(self):
    # Identity is derived from the repo URL so distinct repos never collide.
    return '%s@%s' % (self.__class__.__name__, StringIdentity(self._url))
def _GetOrCreateBranchUtility():
  '''Lazily creates the shared BranchUtility singleton.'''
  if ServerInstance.branch_utility is None:
    ServerInstance.branch_utility = BranchUtility(
        url_constants.OMAHA_PROXY_URL,
        AppEngineUrlFetcher())
  return ServerInstance.branch_utility
def Create(object_store_creator):
  '''Factory for a BranchUtility backed by the omahaproxy endpoints.'''
  return BranchUtility(url_constants.OMAHA_PROXY_URL,
                       url_constants.OMAHA_DEV_HISTORY,
                       AppEngineUrlFetcher(),
                       object_store_creator)
def setUp(self):
  # Route all URL fetches through the canned test fixtures.
  ConfigureFakeFetchers()
  self._patcher = RietveldPatcher(
      EXTENSIONS_PATH,
      '14096030',
      AppEngineUrlFetcher(url_constants.CODEREVIEW_SERVER))
def Create(object_store_creator):
  '''Factory for a GithubFileSystem talking to the live github API.'''
  return GithubFileSystem(
      AppEngineUrlFetcher(url_constants.GITHUB_URL),
      blobstore.AppEngineBlobstore(),
      object_store_creator)