Example #1
    def __init__(self, object_store_creator, compiled_fs_factory,
                 host_file_system, github_file_system_provider,
                 gcs_file_system_provider):
        self._object_store_creator = object_store_creator
        self._compiled_fs_factory = compiled_fs_factory
        self._host_file_system = host_file_system
        self._github_file_system_provider = github_file_system_provider
        self._gcs_file_system_provider = gcs_file_system_provider
        self._cache = None

        # If running the devserver and there is a LOCAL_DEBUG_DIR, we
        # will read the content_provider configuration from there instead
        # of fetching it from SVN trunk or patch.
        if environment.IsDevServer() and os.path.exists(LOCAL_DEBUG_DIR):
            local_fs = LocalFileSystem(LOCAL_DEBUG_DIR)
            conf_stat = None
            try:
                conf_stat = local_fs.Stat(CONTENT_PROVIDERS)
            except:
                pass

            if conf_stat:
                logging.warn(
                    ("Using local debug folder (%s) for "
                     "content_provider.json configuration") % LOCAL_DEBUG_DIR)
                self._cache = compiled_fs_factory.ForJson(local_fs)

        if not self._cache:
            self._cache = compiled_fs_factory.ForJson(host_file_system)
Example #2
 def ForLocal(object_store_creator, **optargs):
     '''Used in creating a server instance on localhost.'''
     return HostFileSystemProvider(
         object_store_creator,
         constructor_for_test=lambda **_: LocalFileSystem.Create(),
         **optargs)
Example #3
    def testSimple(self):
        self._base_path = os.path.join(self._base_path, 'simple')
        fetcher = LocalFileSystem(self._base_path)
        compiled_fs_factory = CompiledFileSystem.Factory(
            fetcher, ObjectStoreCreator.Factory())
        t_data_source = self._CreateTemplateDataSource(
            compiled_fs_factory, ObjectStoreCreator.Factory())
        template_a1 = Handlebar(self._ReadLocalFile('test1.html'))
        self.assertEqual(
            template_a1.render({}, {
                'templates': {}
            }).text,
            t_data_source.get('test1').render({}, {
                'templates': {}
            }).text)

        template_a2 = Handlebar(self._ReadLocalFile('test2.html'))
        self.assertEqual(
            template_a2.render({}, {
                'templates': {}
            }).text,
            t_data_source.get('test2').render({}, {
                'templates': {}
            }).text)

        self.assertEqual(None, t_data_source.get('junk.html'))
Example #4
    def testEverything(self):
        # All these tests are dependent (see above comment) so lump everything in
        # the one test.
        delegate = _TestDelegate(
            lambda _: MockFileSystem(LocalFileSystem.Create()))

        # Test that the cron runs successfully.
        response = CronServlet(Request.ForTest('trunk'),
                               delegate_for_test=delegate).Get()
        self.assertEqual(200, response.status)

        # Save the file systems created, start with a fresh set for the next run.
        first_run_file_systems = delegate.file_systems[:]
        delegate.file_systems[:] = []

        # When re-running, all file systems should be Stat()d the same number of
        # times, but the second round shouldn't have been re-Read() since the
        # Stats haven't changed.
        response = CronServlet(Request.ForTest('trunk'),
                               delegate_for_test=delegate).Get()
        self.assertEqual(200, response.status)

        self.assertEqual(len(first_run_file_systems),
                         len(delegate.file_systems))
        for i, second_run_file_system in enumerate(delegate.file_systems):
            self.assertTrue(*second_run_file_system.CheckAndReset(
                read_count=0,
                stat_count=first_run_file_systems[i].GetStatCount()))
Example #5
 def testNotFound(self):
     self._base_path = os.path.join(self._base_path, 'simple')
     fetcher = LocalFileSystem(self._base_path)
     compiled_fs_factory = CompiledFileSystem.Factory(
         fetcher, ObjectStoreCreator.TestFactory())
     t_data_source = self._CreateTemplateDataSource(
         compiled_fs_factory, ObjectStoreCreator.TestFactory())
     self.assertEqual(None, t_data_source.get('junk.html'))
Example #6
 def _MakeShaJson(self, hash_value):
     commit_json = json.loads(
         deepcopy(
             LocalFileSystem('').ReadSingle(
                 'test_data/github_file_system/test_owner/repo/commits/HEAD'
             ).Get()))
     commit_json['sha'] = hash_value
     return json.dumps(commit_json)
Example #7
 def setUp(self):
     object_store_creator = ObjectStoreCreator.ForTest()
     self._file_system = CachingFileSystem(
         LocalFileSystem(os.path.join(sys.path[0], 'test_data')),
         object_store_creator)
     self._example_zipper = ExampleZipper(
         CompiledFileSystem.Factory(self._file_system,
                                    object_store_creator), 'example_zipper')
Example #8
 def setUp(self):
     object_store = InMemoryObjectStore('')
     self._file_system = MemcacheFileSystem(
         LocalFileSystem(os.path.join(sys.path[0], 'test_data')),
         object_store)
     self._example_zipper = ExampleZipper(
         self._file_system,
         CompiledFileSystem.Factory(self._file_system, object_store),
         'example_zipper')
Example #9
 def testPartials(self):
     self._base_path = os.path.join(self._base_path, 'partials')
     fetcher = LocalFileSystem(self._base_path)
     cache_factory = CompiledFileSystem.Factory(fetcher, self._object_store)
     t_data_source = self._CreateTemplateDataSource(cache_factory)
     self.assertEqual(
         self._ReadLocalFile('test_expected.html'),
         t_data_source.get('test_tmpl').render(
             json.loads(self._ReadLocalFile('input.json')),
             t_data_source).text)
Example #10
 def setUp(self):
   self._base_path = os.path.join(sys.path[0],
                                  'test_data',
                                  'template_data_source')
   self._fake_api_list_data_source_factory = _FakeFactory()
   self._fake_intro_data_source_factory = _FakeFactory()
   self._fake_samples_data_source_factory = _FakeFactory()
   self._fake_sidenav_data_source_factory = _FakeFactory()
   self._manifest_data_source = ManifestDataSource(
     _FakeFactory(), LocalFileSystem.Create(), '', '')
Example #11
 def ForLocal():
     channel = 'trunk'
     object_store_creator = ObjectStoreCreator(channel,
                                               start_empty=False,
                                               store_type=TestObjectStore)
     file_system = CachingFileSystem(LocalFileSystem.Create(),
                                     object_store_creator)
     return ServerInstance(
         channel, object_store_creator, file_system, EmptyDirFileSystem(),
         '', CompiledFileSystem.Factory(file_system, object_store_creator))
Example #12
        def make_sha_json(hash_value):
            from copy import deepcopy
            commit_json = json.loads(
                deepcopy(
                    LocalFileSystem('').ReadSingle(
                        'test_data/github_file_system/test_owner/repo/commits/HEAD'
                    ).Get()))
            commit_json['commit']['tree']['sha'] = 'hash' + FAKE_HASH[4:]

            return json.dumps(commit_json)
Example #13
def _GetInstanceForBranch(channel_name, local_path):
  branch = BRANCH_UTILITY.GetBranchNumberForChannelName(channel_name)

  # The key for the server is a tuple of |channel_name| with |branch|, since
  # sometimes stable and beta point to the same branch.
  instance_key = _MakeInstanceKey(channel_name, branch)
  instance = SERVER_INSTANCES.get(instance_key, None)
  if instance is not None:
    return instance

  branch_memcache = InMemoryObjectStore(branch)
  if branch == 'local':
    file_system = LocalFileSystem(local_path)
  else:
    file_system = _CreateMemcacheFileSystem(branch, branch_memcache)

  cache_factory = CompiledFileSystem.Factory(file_system, branch_memcache)
  api_list_data_source_factory = APIListDataSource.Factory(cache_factory,
                                                           file_system,
                                                           API_PATH,
                                                           PUBLIC_TEMPLATE_PATH)
  intro_data_source_factory = IntroDataSource.Factory(
      cache_factory,
      [INTRO_PATH, ARTICLE_PATH])
  samples_data_source_factory = SamplesDataSource.Factory(
      channel_name,
      file_system,
      GITHUB_FILE_SYSTEM,
      cache_factory,
      GITHUB_COMPILED_FILE_SYSTEM,
      api_list_data_source_factory,
      EXAMPLES_PATH)
  api_data_source_factory = APIDataSource.Factory(cache_factory,
                                                  API_PATH,
                                                  samples_data_source_factory)
  template_data_source_factory = TemplateDataSource.Factory(
      channel_name,
      api_data_source_factory,
      api_list_data_source_factory,
      intro_data_source_factory,
      samples_data_source_factory,
      KNOWN_ISSUES_DATA_SOURCE,
      cache_factory,
      PUBLIC_TEMPLATE_PATH,
      PRIVATE_TEMPLATE_PATH)
  example_zipper = ExampleZipper(file_system,
                                 cache_factory,
                                 DOCS_PATH)

  instance = ServerInstance(template_data_source_factory,
                            example_zipper,
                            cache_factory)
  SERVER_INSTANCES[instance_key] = instance
  return instance
Example #14
class LocalFileSystemTest(unittest.TestCase):
    def setUp(self):
        self._file_system = LocalFileSystem(
            os.path.join(sys.path[0], 'test_data', 'file_system'))

    def testReadFiles(self):
        expected = {
            'test1.txt': 'test1\n',
            'test2.txt': 'test2\n',
            'test3.txt': 'test3\n',
        }
        self.assertEqual(
            expected,
            self._file_system.Read(['test1.txt', 'test2.txt',
                                    'test3.txt']).Get())

    def testListDir(self):
        expected = ['dir/']
        for i in range(7):
            expected.append('file%d.html' % i)
        self.assertEqual(expected,
                         sorted(self._file_system.ReadSingle('list/')))
Example #15
 def testRender(self):
     self._base_path = os.path.join(self._base_path, 'render')
     fetcher = LocalFileSystem(self._base_path)
     context = json.loads(self._ReadLocalFile('test1.json'))
     cache_factory = CompiledFileSystem.Factory(fetcher, self._object_store)
     self._RenderTest(
         'test1',
         self._CreateTemplateDataSource(
             json.loads(self._ReadLocalFile('test1.json')), cache_factory))
     self._RenderTest(
         'test2',
         self._CreateTemplateDataSource(
             json.loads(self._ReadLocalFile('test2.json')), cache_factory))
Example #16
 def testRender(self):
     self._base_path = os.path.join(self._base_path, 'render')
     fetcher = LocalFileSystem(self._base_path)
     context = json.loads(self._ReadLocalFile('test1.json'))
     compiled_fs_factory = CompiledFileSystem.Factory(
         fetcher, ObjectStoreCreator.Factory())
     self._RenderTest(
         'test1',
         self._CreateTemplateDataSource(
             compiled_fs_factory,
             api_data=json.loads(self._ReadLocalFile('test1.json'))))
     self._RenderTest(
         'test2',
         self._CreateTemplateDataSource(
             compiled_fs_factory,
             api_data=json.loads(self._ReadLocalFile('test2.json'))))
Example #17
    def testEverything(self):
        # All these tests are dependent (see above comment) so lump everything in
        # the one test.
        delegate = _TestDelegate(
            lambda _: MockFileSystem(LocalFileSystem.Create()))

        # Test that the cron runs successfully.
        response = CronServlet(Request.ForTest('trunk'),
                               delegate_for_test=delegate).Get()
        self.assertEqual(1, len(delegate.file_systems))
        self.assertEqual(200, response.status)

        # When re-running, all file systems should be Stat()d the same number of
        # times, but the second round shouldn't have been re-Read() since the
        # Stats haven't changed.
        response = CronServlet(Request.ForTest('trunk'),
                               delegate_for_test=delegate).Get()
        self.assertEqual(2, len(delegate.file_systems))
        self.assertTrue(*delegate.file_systems[1].CheckAndReset(
            read_count=0, stat_count=delegate.file_systems[0].GetStatCount()))
Example #18
    def DISABLED_testSimple(self):
        cache_factory = CompiledFileSystem.Factory(
            LocalFileSystem(self._base_path),
            InMemoryObjectStore('fake_branch'))
        data_source_factory = APIDataSource.Factory(cache_factory, '.',
                                                    FakeSamplesDataSource())
        data_source = data_source_factory.Create({})

        # Take the dict out of the list.
        expected = json.loads(self._ReadLocalFile('expected_test_file.json'))
        expected['permissions'] = None
        test1 = data_source['test_file']
        test1.pop('samples')
        self.assertEqual(expected, test1)
        test2 = data_source['testFile']
        test2.pop('samples')
        self.assertEqual(expected, test2)
        test3 = data_source['testFile.html']
        test3.pop('samples')
        self.assertEqual(expected, test3)
        self.assertRaises(FileNotFoundError, data_source.get, 'junk')
Example #19
    def testSimple(self):
        self._base_path = os.path.join(self._base_path, 'simple')
        fetcher = LocalFileSystem(self._base_path)
        cache_factory = CompiledFileSystem.Factory(fetcher, self._object_store)
        t_data_source = self._CreateTemplateDataSource(
            self._fake_api_data_source_factory, cache_factory)
        template_a1 = Handlebar(self._ReadLocalFile('test1.html'))
        self.assertEqual(
            template_a1.render({}, {
                'templates': {}
            }).text, t_data_source['test1'].render({}, {
                'templates': {}
            }).text)

        template_a2 = Handlebar(self._ReadLocalFile('test2.html'))
        self.assertEqual(
            template_a2.render({}, {
                'templates': {}
            }).text, t_data_source['test2'].render({}, {
                'templates': {}
            }).text)

        self.assertEqual(None, t_data_source['junk.html'])
Example #20
    def Create(self, bucket):
        '''Creates a CloudStorageFileSystemProvider.

    |bucket| is the name of GCS bucket, eg devtools-docs. It is expected
             that this bucket has Read permission for this app in its ACLs.

    Optional configuration can be set in a local_debug/gcs_debug.conf file:
      use_local_fs=True|False
      remote_bucket_prefix=<prefix>

    If running in Preview mode or in Development mode with use_local_fs set to
    True, buckets and files are looked for inside the local_debug folder instead
    of in the real GCS server.
    '''
        if IsTest():
            return EmptyDirFileSystem()

        debug_bucket_prefix = None
        use_local_fs = False
        if os.path.exists(LOCAL_GCS_DEBUG_CONF):
            with open(LOCAL_GCS_DEBUG_CONF, "r") as token_file:
                properties = dict(line.strip().split('=', 1)
                                  for line in token_file)
            use_local_fs = properties.get('use_local_fs', 'False') == 'True'
            debug_bucket_prefix = properties.get('remote_bucket_prefix', None)
            logging.debug('gcs: prefixing all bucket names with %s' %
                          debug_bucket_prefix)

        if use_local_fs:
            return LocalFileSystem(
                ToDirectory(os.path.join(LOCAL_GCS_DIR, bucket)))

        if debug_bucket_prefix:
            bucket = debug_bucket_prefix + bucket

        return CachingFileSystem(CloudStorageFileSystem(bucket),
                                 self._object_store_creator)
Example #21
 def setUp(self):
     self._base_path = os.path.join(sys.path[0], 'test_data',
                                    'sidenav_data_source')
     self._compiled_fs_factory = CompiledFileSystem.Factory(
         LocalFileSystem(self._base_path), ObjectStoreCreator.TestFactory())
Example #22
 def CreateServerInstance(self):
     return ServerInstance.ForTest(LocalFileSystem.Create())
Example #23
 def setUp(self):
     self._path_canonicalizer = PathCanonicalizer(
         LocalFileSystem.Create(PUBLIC_TEMPLATES),
         ObjectStoreCreator.ForTest(), ('.html', '.md'))
Example #24
 def Create(cls, branch='master', commit=None):
   if IsTest():
     return LocalFileSystem.Create('')
   return LocalGitFileSystem(branch, commit)
Example #25
 def CreateServerInstanceForChannel(self, channel):
     return ServerInstance.ForTest(LocalFileSystem.Create())
Example #26
 def setUp(self):
     self._json_path = 'docs/server2/test_data/sidenav_data_source'
     self._compiled_fs_factory = CompiledFileSystem.Factory(
         LocalFileSystem.Create(), ObjectStoreCreator.ForTest())
Example #27
    def testCronAndPublicFiles(self):
        '''Runs cron then requests every public file. Cron needs to be run first
    because the public file requests are offline.
    '''
        if _EXPLICIT_TEST_FILES is not None:
            return

        print('Running cron...')
        start_time = time.time()
        try:
            response = Handler(Request.ForTest('/_cron/stable')).Get()
            self.assertEqual(200, response.status)
            self.assertEqual('Success', response.content.ToString())
        finally:
            print('Took %s seconds' % (time.time() - start_time))

        print("Checking for broken links...")
        start_time = time.time()
        link_error_detector = LinkErrorDetector(
            LocalFileSystem(os.path.join(sys.path[0], os.pardir, os.pardir)),
            lambda path: Handler(Request.ForTest(path)).Get(),
            'templates/public',
            ('extensions/index.html', 'apps/about_apps.html'))

        broken_links = link_error_detector.GetBrokenLinks()
        if broken_links:
            # TODO(jshumway): Test should fail when broken links are detected.
            print('Warning: Found %d broken links:' % (len(broken_links)))
            print(StringifyBrokenLinks(broken_links))

        print('Took %s seconds.' % (time.time() - start_time))

        print('Searching for orphaned pages...')
        start_time = time.time()
        orphaned_pages = link_error_detector.GetOrphanedPages()
        if orphaned_pages:
            # TODO(jshumway): Test should fail when orphaned pages are detected.
            print('Warning: Found %d orphaned pages:' % len(orphaned_pages))
            for page in orphaned_pages:
                print(page)
        print('Took %s seconds.' % (time.time() - start_time))

        public_files = _GetPublicFiles()

        print('Rendering %s public files...' % len(public_files.keys()))
        start_time = time.time()
        try:
            for path, content in public_files.iteritems():
                if path.endswith('redirects.json'):
                    continue

                def check_result(response):
                    self.assertEqual(
                        200, response.status,
                        'Got %s when rendering %s' % (response.status, path))
                    # This is reaaaaally rough since usually these will be tiny templates
                    # that render large files. At least it'll catch zero-length responses.
                    self.assertTrue(
                        len(response.content) >= len(content),
                        'Content was "%s" when rendering %s' %
                        (response.content, path))

                check_result(Handler(Request.ForTest(path)).Get())

                # Make sure that leaving out the .html will temporarily redirect to the
                # path with the .html.
                if path.startswith(('apps/', 'extensions/')):
                    redirect_result = Handler(
                        Request.ForTest(posixpath.splitext(path)[0])).Get()
                    self.assertEqual((path, False),
                                     redirect_result.GetRedirect())

                # Make sure including a channel will permanently redirect to the same
                # path without a channel.
                for channel in BranchUtility.GetAllChannelNames():
                    redirect_result = Handler(
                        Request.ForTest('%s/%s' % (channel, path))).Get()
                    self.assertEqual((path, True),
                                     redirect_result.GetRedirect())

                # Samples are internationalized, test some locales.
                if path.endswith('/samples.html'):
                    for lang in ['en-US', 'es', 'ar']:
                        check_result(
                            Handler(
                                Request.ForTest(path,
                                                headers={
                                                    'Accept-Language':
                                                    '%s;q=0.8' % lang
                                                })).Get())
        finally:
            print('Took %s seconds' % (time.time() - start_time))
Example #28
    def Create(self, bucket):
        '''Creates a CloudStorageFileSystemProvider.

    |bucket| is the name of GCS bucket, eg devtools-docs. It is expected
             that this bucket has Read permission for this app in its ACLs.

    Optional configuration can be set in a local_debug/gcs_debug.conf file:
      use_local_fs=True|False
      access_token=<token>
      remote_bucket_prefix=<prefix>

    If running in Preview mode or in Development mode with use_local_fs set to
    True, buckets and files are looked for inside the local_debug folder instead
    of in the real GCS server. Preview server does not support direct GCS
    access, so it is always forced to use a LocalFileSystem.

    For real GCS access in the Development mode (dev_appserver.py),
    access_token and remote_bucket_prefix options can be
    used to change the way GCS files are accessed. Both are ignored in a real
    appengine instance.

    "access_token" is always REQUIRED on dev_appengine, otherwise you will
    get 404 (auth) errors. You can get one access_token valid for a few minutes
    by typing:
      gsutil -d ls 2>&1 | grep "Bearer" |
         sed "s/.*Bearer \(.*\).r.nUser-Agent.*/access_token=\1/" )"

    A sample output would be:
      access_token=ya29.1.AADtN_VW5ibbfLHV5cMIK5ss4bHtVzBXpa4byjd

    Now add this line to the local_debug/gcs_debug.conf file and restart the
    appengine development server.

    Remember that you will need a new access_token every ten minutes or
    so. If you get 404 errors on log, update it. Access token is not
    used for a deployed appengine app, only if you use dev_appengine.py.

    remote_bucket_prefix is useful if you want to test on your own GCS buckets
    before using the real GCS buckets.

    '''
        if not environment.IsReleaseServer() and not environment.IsDevServer():
            bucket_local_path = os.path.join(LOCAL_GCS_DIR, bucket)
            if IsDirectory(bucket_local_path):
                return LocalFileSystem(bucket_local_path)
            else:
                return EmptyDirFileSystem()

        debug_access_token = None
        debug_bucket_prefix = None
        use_local_fs = False

        if environment.IsDevServer() and os.path.exists(LOCAL_GCS_DEBUG_CONF):
            with open(LOCAL_GCS_DEBUG_CONF, "r") as token_file:
                properties = dict(line.strip().split('=', 1)
                                  for line in token_file)
            use_local_fs = properties.get('use_local_fs', 'False') == 'True'
            debug_access_token = properties.get('access_token', None)
            debug_bucket_prefix = properties.get('remote_bucket_prefix', None)

        if environment.IsDevServer() and use_local_fs:
            return LocalFileSystem(os.path.join(LOCAL_GCS_DIR, bucket))

        # gcs_file_system has strong dependencies on runtime appengine APIs,
        # so we only import it when we are sure we are not on preview.py or tests.
        from gcs_file_system import CloudStorageFileSystem
        return CachingFileSystem(
            CloudStorageFileSystem(bucket, debug_access_token,
                                   debug_bucket_prefix),
            self._object_store_creator)
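Combining the options documented in the docstring above, a complete local_debug/gcs_debug.conf for a dev_appserver.py run might look like the following; the token and bucket prefix are placeholders (a real access_token comes from the gsutil command quoted in the docstring and expires after a few minutes):

use_local_fs=False
access_token=ya29.EXAMPLE_TOKEN_FROM_GSUTIL
remote_bucket_prefix=my-test-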
Example #29
def _CreateLocalFs():
    return LocalFileSystem(
        os.path.join(sys.path[0], 'test_data', 'file_system'))
Example #30
 def CreateHostFileSystemForBranch(self, channel):
     return LocalFileSystem.Create()