Example #1
    def Get(self):
        if (not IsDevServer() and not fnmatch(
                urlparse(self._request.host).netloc, '*.appspot.com')):
            # Only allow patches on appspot URLs; it doesn't matter if appspot.com is
            # XSS'ed, but it matters for chrome.com.
            redirect_host = 'https://chrome-apps-doc.appspot.com'
            logging.info('Redirecting from XSS-able host %s to %s' %
                         (self._request.host, redirect_host))
            return Response.Redirect('%s/_patch/%s' %
                                     (redirect_host, self._request.path))

        path_with_issue = self._request.path.lstrip('/')
        if '/' in path_with_issue:
            issue, path_without_issue = path_with_issue.split('/', 1)
        else:
            return Response.NotFound(
                'Malformed URL. It should look like ' +
                'https://developer.chrome.com/_patch/12345/extensions/...')

        try:
            response = RenderServlet(
                Request(path_without_issue, self._request.host,
                        self._request.headers),
                _PatchServletDelegate(issue, self._delegate)).Get()
            # Disable cache for patched content.
            response.headers.pop('cache-control', None)
        except RietveldPatcherError as e:
            response = Response.NotFound(e.message,
                                         {'Content-Type': 'text/plain'})

        redirect_url, permanent = response.GetRedirect()
        if redirect_url is not None:
            response = Response.Redirect(
                '/_patch/%s%s' % (issue, redirect_url), permanent)
        return response
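
The redirect above hinges on a shell-style wildcard match against the request's
netloc. A standalone sketch of that check, using a hypothetical helper name and
assuming the host string includes a scheme (urlparse() only fills in netloc
when one is present):

from fnmatch import fnmatch
from urlparse import urlparse

def is_appspot_host(host):
    # fnmatch does shell-style matching, so '*.appspot.com' covers any
    # subdomain of appspot.com but not other hosts such as chrome.com.
    return fnmatch(urlparse(host).netloc, '*.appspot.com')

assert is_appspot_host('https://chrome-apps-doc.appspot.com/_patch/12345')
assert not is_appspot_host('https://developer.chrome.com/_patch/12345')
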
Example #2
 def DelMulti(self, keys):
   futures = []
   for key in keys:
     futures.append(db.delete_async(
         PersistentObjectStoreItem.CreateKey(self._namespace, key)))
   # If running the dev server, the futures don't complete until the server is
   # *quitting*. This is annoying. Flush now.
   if IsDevServer():
     [future.wait() for future in futures]

 def SetMulti(self, mapping):
   futures = []
   for key, value in mapping.items():
     futures.append(db.put_async(
         PersistentObjectStoreItem.CreateItem(self._namespace, key, value)))
   # If running the dev server, the futures don't complete until the server is
   # *quitting*. This is annoying. Flush now.
   if IsDevServer():
     [future.wait() for future in futures]
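
For reference, the batching contract of these two methods (a dict of puts, a
list of deletes) can be exercised without App Engine at all. The class below
is a purely illustrative in-memory stand-in, not part of the codebase:

class InMemoryObjectStore(object):
    '''Backs SetMulti/DelMulti with a plain dict instead of datastore RPCs.'''

    def __init__(self, namespace):
        self._namespace = namespace
        self._data = {}

    def SetMulti(self, mapping):
        self._data.update(mapping)

    def DelMulti(self, keys):
        for key in keys:
            self._data.pop(key, None)

store = InMemoryObjectStore('docs')
store.SetMulti({'intro': '<h1>Intro</h1>', 'api': '{}'})
store.DelMulti(['intro', 'missing-key'])
assert store._data == {'api': '{}'}
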
Example #4
 def SetMulti(self, mapping):
   entities = [PersistentObjectStoreItem.CreateItem(
                   self._namespace, key, value)
               for key, value in mapping.iteritems()]
   # Some entities may be None if they were too large to insert. Skip those.
   rpcs = [db.put_async(entity) for entity in entities if entity]
   # If running the dev server, the futures don't complete until the server is
   # *quitting*. This is annoying. Flush now.
   if IsDevServer():
     [rpc.wait() for rpc in rpcs]
   # Bind rpc as a default argument so each Future resolves its own rpc's
   # result instead of late-binding to the last rpc in the generator.
   return All(Future(callback=lambda rpc=rpc: rpc.get_result())
              for rpc in rpcs)
Example #5
 def SetMulti(self, mapping):
     rpcs = [
         db.put_async(
             PersistentObjectStoreItem.CreateItem(self._namespace, key,
                                                  value))
         for key, value in mapping.iteritems()
     ]
     # If running the dev server, the futures don't complete until the server is
     # *quitting*. This is annoying. Flush now.
     if IsDevServer():
         [rpc.wait() for rpc in rpcs]
     # Bind rpc as a default argument so each Future resolves its own rpc's
     # result instead of late-binding to the last rpc in the generator.
     return All(Future(callback=lambda rpc=rpc: rpc.get_result())
                for rpc in rpcs)
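
Future and All above come from the docs server's own futures module and are
not shown in these excerpts. The stand-ins below are a deliberately simplified
sketch, not the real implementation, illustrating the shape being relied on:
All() returns a single future whose Get() resolves every wrapped callback.

class SimpleFuture(object):
    '''Minimal Future stand-in: defers a callback until Get() is called.'''

    def __init__(self, callback):
        self._callback = callback

    def Get(self):
        return self._callback()

def All(futures):
    '''Combines futures into one future resolving to a list of results.'''
    futures = list(futures)
    return SimpleFuture(lambda: [f.Get() for f in futures])

# Note the default-argument binding, for the same reason as above.
combined = All(SimpleFuture(lambda n=n: n * n) for n in (1, 2, 3))
assert combined.Get() == [1, 4, 9]
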
Example #6
 def CreateServerInstance(self):
     object_store_creator = ObjectStoreCreator(start_empty=False)
     branch_utility = self._delegate.CreateBranchUtility(
         object_store_creator)
     # In production have offline=True so that we can catch cron errors.  In
     # development it's annoying to have to run the cron job, so offline=False.
     host_file_system_provider = self._delegate.CreateHostFileSystemProvider(
         object_store_creator, offline=not IsDevServer())
     github_file_system_provider = self._delegate.CreateGithubFileSystemProvider(
         object_store_creator)
     return ServerInstance(object_store_creator,
                           CompiledFileSystem.Factory(object_store_creator),
                           branch_utility, host_file_system_provider,
                           github_file_system_provider)
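
IsDevServer() itself is not shown in these excerpts. On App Engine the local
development server identifies itself through the SERVER_SOFTWARE environment
variable, so an implementation along these lines is plausible (a sketch, not
necessarily the project's exact code):

import os

def IsDevServer():
    # dev_appserver reports something like 'Development/2.0', while production
    # reports 'Google App Engine/<version>'.
    return os.environ.get('SERVER_SOFTWARE', '').startswith('Development')
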
Example #7
 def Get(self):
     if not IsDevServer():
         return Response.BadRequest('')
     import cPickle
     from persistent_object_store_appengine import PersistentObjectStoreAppengine
     with open(self._request.path, 'r') as f:
         data = cPickle.load(f)
     for namespace, contents in data.iteritems():
         store = PersistentObjectStoreAppengine(namespace)
         for k, v in cPickle.loads(contents).iteritems():
             try:
                 store.Set(k, v).Get()
             except Exception:
                 logging.warn('Skipping entry %s because of errors.' % k)
     return Response.Ok('Data pushed!')
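
The handler above expects a doubly pickled payload: an outer dict mapping each
namespace to a pickled dict of key/value pairs. A hypothetical helper for
producing such a file (its name and layout are inferred from the reading code,
not taken from the repository):

import cPickle

def DumpObjectStores(data_by_namespace, path):
    # Outer layer: {namespace: pickled({key: value})}, matching the nested
    # cPickle.loads() call in the handler above.
    payload = dict((namespace, cPickle.dumps(contents))
                   for namespace, contents in data_by_namespace.iteritems())
    with open(path, 'w') as f:
        cPickle.dump(payload, f)

DumpObjectStores({'docs': {'intro.html': '<h1>Intro</h1>'}}, '/tmp/push.pickle')
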
Example #8
 def GetSamplesModel(self, platform):
     if self._platform_data[platform].samples_model is None:
         # Note: samples are super slow in the dev server because it doesn't
         # support async fetch, so disable them.
         if IsDevServer():
             extension_samples_fs = EmptyDirFileSystem()
             app_samples_fs = EmptyDirFileSystem()
         else:
             extension_samples_fs = self._host_fs_at_master
             # TODO(kalman): Re-enable the apps samples, see http://crbug.com/344097.
             app_samples_fs = EmptyDirFileSystem()
         self._platform_data[platform].samples_model = SamplesModel(
             extension_samples_fs,
             app_samples_fs, self._compiled_fs_factory,
             self.GetReferenceResolver(platform), self._base_path, platform)
     return self._platform_data[platform].samples_model
 def CreateServerInstance(self):
     object_store_creator = ObjectStoreCreator(start_empty=False)
     branch_utility = self._delegate.CreateBranchUtility(
         object_store_creator)
     # In production have offline=True so that we can catch cron errors. In
     # development it's annoying to have to run the cron job, so offline=False.
     # Note that offline=True if running on any appengine server due to
     # http://crbug.com/345361.
     host_file_system_provider = self._delegate.CreateHostFileSystemProvider(
         object_store_creator,
         offline=not (IsDevServer() or IsReleaseServer()))
     github_file_system_provider = self._delegate.CreateGithubFileSystemProvider(
         object_store_creator)
     return ServerInstance(
         object_store_creator,
         CompiledFileSystem.Factory(object_store_creator), branch_utility,
         host_file_system_provider, github_file_system_provider,
         CloudStorageFileSystemProvider(object_store_creator))
Example #10
  def Create(cls, branch='master', commit=None):
    token, _ = app_identity.get_access_token(GITILES_OAUTH2_SCOPE)

    # Log the access token (once per token) so that it can be sneakily re-used
    # in development.
    if token not in cls._logged_tokens:
      logging.info('Got token %s for scope %s' % (token, GITILES_OAUTH2_SCOPE))
      cls._logged_tokens.add(token)

    # Only include forced-auth (/a/) in the Gitiles URL if we have a token and
    # this is not the development server.
    path_prefix = ('' if token is None or IsDevServer()
                      else _AUTH_PATH_PREFIX)
    if commit:
      base_url = '%s%s/%s/%s' % (
          GITILES_BASE, path_prefix, GITILES_SRC_ROOT, commit)
    elif branch == 'master':
      base_url = '%s%s/%s/master' % (
          GITILES_BASE, path_prefix, GITILES_SRC_ROOT)
    else:
      base_url = '%s%s/%s/%s/%s' % (
          GITILES_BASE, path_prefix, GITILES_SRC_ROOT,
          GITILES_BRANCHES_PATH, branch)
    return GitilesFileSystem(AppEngineUrlFetcher(), base_url, branch, commit)
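
To make the three URL shapes concrete, here is the same composition with the
module constants stubbed out. The constant values are assumptions for
illustration only, not the repository's actual definitions:

# Assumed values, for illustration only.
GITILES_BASE = 'https://chromium.googlesource.com'
GITILES_SRC_ROOT = 'chromium/src/+'
GITILES_BRANCHES_PATH = 'refs/branch-heads'
_AUTH_PATH_PREFIX = '/a'

def _BaseUrl(path_prefix, branch='master', commit=None):
    if commit:
        return '%s%s/%s/%s' % (GITILES_BASE, path_prefix,
                               GITILES_SRC_ROOT, commit)
    elif branch == 'master':
        return '%s%s/%s/master' % (GITILES_BASE, path_prefix, GITILES_SRC_ROOT)
    return '%s%s/%s/%s/%s' % (GITILES_BASE, path_prefix, GITILES_SRC_ROOT,
                              GITILES_BRANCHES_PATH, branch)

# Unauthenticated (dev server or no token) vs. forced-auth (/a/) URLs.
assert _BaseUrl('').endswith('/chromium/src/+/master')
assert '/a/' in _BaseUrl(_AUTH_PATH_PREFIX, branch='1750')
assert _BaseUrl('', commit='abc123').endswith('/+/abc123')
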
Example #11
  def __init__(self,
               object_store_creator,
               compiled_fs_factory,
               branch_utility,
               host_file_system_provider,
               github_file_system_provider,
               gcs_file_system_provider,
               base_path='/'):
    '''
    |object_store_creator|
        The ObjectStoreCreator used to create almost all caches.
    |compiled_fs_factory|
        Factory used to create CompiledFileSystems, a higher-level cache type
        than ObjectStores. This can usually be derived from just
        |object_store_creator| but under special circumstances a different
        implementation needs to be passed in.
    |branch_utility|
        Has knowledge of Chrome branches, channels, and versions.
    |host_file_system_provider|
        Creates FileSystem instances which host the server at alternative
        revisions.
    |github_file_system_provider|
        Creates FileSystem instances backed by GitHub.
    |gcs_file_system_provider|
        Creates FileSystem instances backed by Google Cloud Storage.
    |base_path|
        The path which all HTML is generated relative to. Usually this is /
        but some servlets need to override this.
    '''
    self.object_store_creator = object_store_creator

    self.compiled_fs_factory = compiled_fs_factory

    self.host_file_system_provider = host_file_system_provider
    host_fs_at_trunk = host_file_system_provider.GetTrunk()

    self.github_file_system_provider = github_file_system_provider
    self.gcs_file_system_provider = gcs_file_system_provider

    assert base_path.startswith('/') and base_path.endswith('/')
    self.base_path = base_path

    self.host_file_system_iterator = HostFileSystemIterator(
        host_file_system_provider,
        branch_utility)

    self.features_bundle = FeaturesBundle(
        host_fs_at_trunk,
        self.compiled_fs_factory,
        self.object_store_creator)

    self.api_models = APIModels(
        self.features_bundle,
        self.compiled_fs_factory,
        host_fs_at_trunk)

    self.availability_finder = AvailabilityFinder(
        branch_utility,
        compiled_fs_factory,
        self.host_file_system_iterator,
        host_fs_at_trunk,
        object_store_creator)

    self.api_categorizer = APICategorizer(
        host_fs_at_trunk,
        compiled_fs_factory)

    self.api_data_source_factory = APIDataSource.Factory(
        self.compiled_fs_factory,
        host_fs_at_trunk,
        self.availability_finder,
        self.api_models,
        self.features_bundle,
        self.object_store_creator)

    self.ref_resolver_factory = ReferenceResolver.Factory(
        self.api_data_source_factory,
        self.api_models,
        object_store_creator)

    self.api_data_source_factory.SetReferenceResolverFactory(
        self.ref_resolver_factory)

    # Note: samples are super slow in the dev server because it doesn't support
    # async fetch, so disable them.
    if IsDevServer():
      extension_samples_fs = EmptyDirFileSystem()
      app_samples_fs = EmptyDirFileSystem()
    else:
      extension_samples_fs = host_fs_at_trunk
      # TODO(kalman): Re-enable the apps samples, see http://crbug.com/344097.
      app_samples_fs = EmptyDirFileSystem()
      #app_samples_fs = github_file_system_provider.Create(
      #    'GoogleChrome', 'chrome-app-samples')
    self.samples_data_source_factory = SamplesDataSource.Factory(
        extension_samples_fs,
        app_samples_fs,
        CompiledFileSystem.Factory(object_store_creator),
        self.ref_resolver_factory,
        base_path)

    self.api_data_source_factory.SetSamplesDataSourceFactory(
        self.samples_data_source_factory)

    self.content_providers = ContentProviders(
        object_store_creator,
        self.compiled_fs_factory,
        host_fs_at_trunk,
        self.github_file_system_provider,
        self.gcs_file_system_provider)

    # TODO(kalman): Move all the remaining DataSources into DataSourceRegistry,
    # then factor out the DataSource creation into a factory method, so that
    # the entire ServerInstance doesn't need to be passed in here.
    self.template_renderer = TemplateRenderer(self)

    # TODO(kalman): It may be better for |document_renderer| to construct a
    # TemplateDataSource itself rather than depending on template_renderer, but
    # for that the above todo should be addressed.
    self.document_renderer = DocumentRenderer(
        TableOfContentsRenderer(host_fs_at_trunk,
                                compiled_fs_factory,
                                self.template_renderer),
        self.ref_resolver_factory.Create())
Example #12
    def _GetImpl(self):
        # Cron strategy:
        #
        # Find all public template files and static files, and render them. Most of
        # the time these won't have changed since the last cron run, so it's a
        # little wasteful, but hopefully rendering is really fast (if it isn't we
        # have a problem).
        _cronlog.info('starting')

        # This is returned every time RenderServlet wants to create a new
        # ServerInstance.
        #
        # TODO(kalman): IMPORTANT. This sometimes throws an exception, breaking
        # everything. Need retry logic at the fetcher level.
        server_instance = self._GetSafeServerInstance()
        trunk_fs = server_instance.host_file_system_provider.GetTrunk()

        def render(path):
            request = Request(path, self._request.host, self._request.headers)
            delegate = _SingletonRenderServletDelegate(server_instance)
            return RenderServlet(request, delegate).Get()

        def request_files_in_dir(path, prefix='', strip_ext=None):
            '''Requests every file found under |path| in this host file
            system, with a request prefix of |prefix|. |strip_ext| is an
            optional list of file extensions that should be stripped from
            paths before requesting.
            '''
            def maybe_strip_ext(name):
                if name == SITE_VERIFICATION_FILE or not strip_ext:
                    return name
                base, ext = posixpath.splitext(name)
                return base if ext in strip_ext else name

            files = [
                maybe_strip_ext(name)
                for name, _ in CreateURLsFromPaths(trunk_fs, path, prefix)
            ]
            return _RequestEachItem(path, files, render)

        results = []

        try:
            # Start running the hand-written Cron methods first; they can be run in
            # parallel. They are resolved at the end.
            def run_cron_for_future(target):
                title = target.__class__.__name__
                future, init_timer = TimerClosure(target.Cron)
                assert isinstance(future, Future), (
                    '%s.Cron() did not return a Future' % title)

                def resolve():
                    resolve_timer = Timer()
                    try:
                        future.Get()
                    except Exception as e:
                        _cronlog.error('%s: error %s' %
                                       (title, traceback.format_exc()))
                        results.append(False)
                        if IsDeadlineExceededError(e): raise
                    finally:
                        resolve_timer.Stop()
                        _cronlog.info(
                            '%s took %s: %s to initialize and %s to resolve' %
                            (title,
                             init_timer.With(resolve_timer).FormatElapsed(),
                             init_timer.FormatElapsed(),
                             resolve_timer.FormatElapsed()))

                return Future(delegate=Gettable(resolve))

            targets = (CreateDataSources(server_instance).values() +
                       [server_instance.content_providers])
            title = 'initializing %s parallel Cron targets' % len(targets)
            _cronlog.info(title)
            timer = Timer()
            try:
                cron_futures = [
                    run_cron_for_future(target) for target in targets
                ]
            finally:
                _cronlog.info('%s took %s' %
                              (title, timer.Stop().FormatElapsed()))

            # Rendering the public templates will also pull in all of the private
            # templates.
            results.append(
                request_files_in_dir(PUBLIC_TEMPLATES,
                                     strip_ext=('.html', '.md')))

            # Rendering the public templates will have pulled in the .js and
            # manifest.json files (for listing examples on the API reference pages),
            # but there are still images, CSS, etc.
            results.append(request_files_in_dir(STATIC_DOCS, prefix='static'))

            # Samples are too expensive to run on the dev server, where there is no
            # parallel fetch.
            if not IsDevServer():
                # Fetch each individual sample file.
                results.append(
                    request_files_in_dir(EXAMPLES,
                                         prefix='extensions/examples'))

                # Fetch the zip file of each example (contains all the individual
                # files).
                example_zips = []
                for root, _, files in trunk_fs.Walk(EXAMPLES):
                    example_zips.extend(root + '.zip' for name in files
                                        if name == 'manifest.json')
                results.append(
                    _RequestEachItem(
                        'example zips', example_zips,
                        lambda path: render('extensions/examples/' + path)))

            # Resolve the hand-written Cron method futures.
            title = 'resolving %s parallel Cron targets' % len(targets)
            _cronlog.info(title)
            timer = Timer()
            try:
                for future in cron_futures:
                    future.Get()
            finally:
                _cronlog.info('%s took %s' %
                              (title, timer.Stop().FormatElapsed()))

        except:
            results.append(False)
            # This should never actually happen (each cron step does its own
            # conservative error checking), so re-raise no matter what it is.
            _cronlog.error('uncaught error: %s' % traceback.format_exc())
            raise
        finally:
            success = all(results)
            _cronlog.info('finished (%s)', 'success' if success else 'FAILED')
            return (Response.Ok('Success')
                    if success else Response.InternalError('Failure'))
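
Timer and TimerClosure come from the docs server's timing utilities and are
not shown here. The sketch below is an assumed, simplified equivalent
illustrating the split the handler logs: time spent initializing a Cron()
call versus time spent resolving its future.

import time

class SimpleTimer(object):
    '''Assumed stand-in for the project's Timer: measures wall-clock time.'''

    def __init__(self):
        self._start = time.time()
        self.elapsed = None

    def Stop(self):
        self.elapsed = time.time() - self._start
        return self

def TimedCall(fn, *args):
    '''Returns (result, timer), where the timer covers only the call to fn.'''
    timer = SimpleTimer()
    try:
        return fn(*args), timer
    finally:
        timer.Stop()

result, init_timer = TimedCall(sum, [1, 2, 3])
assert result == 6 and init_timer.elapsed >= 0
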
Example #13
    def __init__(self,
                 object_store_creator,
                 compiled_fs_factory,
                 branch_utility,
                 host_file_system_provider,
                 github_file_system_provider,
                 base_path='/'):
        '''
        |object_store_creator|
            The ObjectStoreCreator used to create almost all caches.
        |compiled_fs_factory|
            Factory used to create CompiledFileSystems, a higher-level cache
            type than ObjectStores. This can usually be derived from just
            |object_store_creator| but under special circumstances a different
            implementation needs to be passed in.
        |branch_utility|
            Has knowledge of Chrome branches, channels, and versions.
        |host_file_system_provider|
            Creates FileSystem instances which host the server at alternative
            revisions.
        |github_file_system_provider|
            Creates FileSystem instances backed by GitHub.
        |base_path|
            The path which all HTML is generated relative to. Usually this is
            / but some servlets need to override this.
        '''
        self.object_store_creator = object_store_creator

        self.compiled_fs_factory = compiled_fs_factory

        self.host_file_system_provider = host_file_system_provider
        host_fs_at_trunk = host_file_system_provider.GetTrunk()

        self.github_file_system_provider = github_file_system_provider

        assert base_path.startswith('/') and base_path.endswith('/')
        self.base_path = base_path

        self.host_file_system_iterator = HostFileSystemIterator(
            host_file_system_provider, branch_utility)

        self.features_bundle = FeaturesBundle(host_fs_at_trunk,
                                              self.compiled_fs_factory,
                                              self.object_store_creator)

        self.api_models = APIModels(self.features_bundle,
                                    self.compiled_fs_factory, host_fs_at_trunk)

        self.availability_finder = AvailabilityFinder(
            branch_utility, compiled_fs_factory,
            self.host_file_system_iterator, host_fs_at_trunk,
            object_store_creator)

        self.api_list_data_source_factory = APIListDataSource.Factory(
            self.compiled_fs_factory, host_fs_at_trunk, self.features_bundle,
            self.object_store_creator)

        self.api_data_source_factory = APIDataSource.Factory(
            self.compiled_fs_factory, host_fs_at_trunk, svn_constants.API_PATH,
            self.availability_finder, branch_utility)

        self.ref_resolver_factory = ReferenceResolver.Factory(
            self.api_data_source_factory, self.api_models,
            object_store_creator)

        self.api_data_source_factory.SetReferenceResolverFactory(
            self.ref_resolver_factory)

        # Note: samples are super slow in the dev server because it doesn't support
        # async fetch, so disable them.
        if IsDevServer():
            extension_samples_fs = EmptyDirFileSystem()
            app_samples_fs = EmptyDirFileSystem()
        else:
            extension_samples_fs = host_fs_at_trunk
            app_samples_fs = github_file_system_provider.Create(
                'GoogleChrome', 'chrome-app-samples')
        self.samples_data_source_factory = SamplesDataSource.Factory(
            extension_samples_fs, app_samples_fs,
            CompiledFileSystem.Factory(object_store_creator),
            self.ref_resolver_factory, svn_constants.EXAMPLES_PATH, base_path)

        self.api_data_source_factory.SetSamplesDataSourceFactory(
            self.samples_data_source_factory)

        self.intro_data_source_factory = IntroDataSource.Factory(
            self.compiled_fs_factory, host_fs_at_trunk,
            self.ref_resolver_factory,
            [svn_constants.INTRO_PATH, svn_constants.ARTICLE_PATH])

        self.path_canonicalizer = PathCanonicalizer(self.compiled_fs_factory,
                                                    host_fs_at_trunk)

        self.content_providers = ContentProviders(
            self.compiled_fs_factory, host_fs_at_trunk,
            self.github_file_system_provider)

        # TODO(kalman): Move all the remaining DataSources into DataSourceRegistry,
        # then factor out the DataSource creation into a factory method, so that
        # the entire ServerInstance doesn't need to be passed in here.
        self.template_renderer = TemplateRenderer(self)

        self.strings_json_path = svn_constants.STRINGS_JSON_PATH
        self.manifest_json_path = svn_constants.MANIFEST_JSON_PATH
        self.manifest_features_path = svn_constants.MANIFEST_FEATURES_PATH