Example No. 1
 def setUp(self):
     object_store_creator = ObjectStoreCreator.ForTest()
     self._file_system = CachingFileSystem(
         LocalFileSystem(os.path.join(sys.path[0], 'test_data')),
         object_store_creator)
     self._example_zipper = ExampleZipper(
         CompiledFileSystem.Factory(self._file_system,
                                    object_store_creator), 'example_zipper')
Example No. 2
 def testReadFiles(self):
     file_system = CachingFileSystem(_CreateLocalFs(),
                                     ObjectStoreCreator.Factory())
     expected = {
         './test1.txt': 'test1\n',
         './test2.txt': 'test2\n',
         './test3.txt': 'test3\n',
     }
     self.assertEqual(
         expected,
         file_system.Read(['./test1.txt', './test2.txt',
                           './test3.txt']).Get())
Example No. 3
    def CreateServerInstanceForChannel(self, channel):
        base_object_store_creator = ObjectStoreCreator(channel,
                                                       start_empty=False)
        # TODO(fj): Use OfflineFileSystem here once all json/idl files in api/
        # are pulled into the data store by cron jobs.
        base_file_system = CachingFileSystem(
            self._delegate.CreateHostFileSystemForBranch(channel),
            base_object_store_creator)
        base_compiled_fs_factory = CompiledFileSystem.Factory(
            base_file_system, base_object_store_creator)

        object_store_creator = ObjectStoreCreator('trunk@%s' % self._issue,
                                                  start_empty=False)
        rietveld_patcher = CachingRietveldPatcher(
            RietveldPatcher(
                svn_constants.EXTENSIONS_PATH, self._issue,
                AppEngineUrlFetcher(url_constants.CODEREVIEW_SERVER)),
            object_store_creator)
        patched_file_system = PatchedFileSystem(base_file_system,
                                                rietveld_patcher)
        patched_compiled_fs_factory = CompiledFileSystem.Factory(
            patched_file_system, object_store_creator)

        compiled_fs_factory = ChainedCompiledFileSystem.Factory([
            (patched_compiled_fs_factory, patched_file_system),
            (base_compiled_fs_factory, base_file_system)
        ])
        return ServerInstance(
            channel, object_store_creator, patched_file_system,
            self._delegate.CreateAppSamplesFileSystem(
                base_object_store_creator), '/_patch/%s' % self._issue,
            compiled_fs_factory)
Example No. 4
    def CreateServerInstance(self):
        object_store_creator = ObjectStoreCreator(start_empty=False)
        branch_utility = self._delegate.CreateBranchUtility(
            object_store_creator)
        host_file_system_creator = self._delegate.CreateHostFileSystemCreator(
            object_store_creator)
        # offline=False because a patch can rely on files that are already in the
        # SVN repository but not yet pulled into the data store by cron jobs (a
        # typical example is to add documentation for an existing API).
        base_file_system = CachingFileSystem(
            host_file_system_creator.Create(offline=False),
            object_store_creator)
        base_compiled_fs_factory = CompiledFileSystem.Factory(
            base_file_system, object_store_creator)

        rietveld_patcher = CachingRietveldPatcher(
            RietveldPatcher(
                svn_constants.EXTENSIONS_PATH, self._issue,
                AppEngineUrlFetcher(url_constants.CODEREVIEW_SERVER)),
            object_store_creator)
        patched_file_system = PatchedFileSystem(base_file_system,
                                                rietveld_patcher)
        patched_compiled_fs_factory = CompiledFileSystem.Factory(
            patched_file_system, object_store_creator)

        compiled_fs_factory = ChainedCompiledFileSystem.Factory([
            (patched_compiled_fs_factory, patched_file_system),
            (base_compiled_fs_factory, base_file_system)
        ])

        return ServerInstance(
            object_store_creator, patched_file_system,
            self._delegate.CreateAppSamplesFileSystem(object_store_creator),
            '/_patch/%s' % self._issue, compiled_fs_factory, branch_utility,
            host_file_system_creator)
Example No. 5
    def CreateOnline(channel):
        '''Creates an online server instance, meaning that both local and
    subversion/github resources are queried.
    '''
        branch_utility = ServerInstance._GetOrCreateBranchUtility()
        branch = branch_utility.GetBranchNumberForChannelName(channel)

        if branch == 'trunk':
            svn_url = '/'.join((url_constants.SVN_TRUNK_URL, 'src',
                                svn_constants.EXTENSIONS_PATH))
        else:
            svn_url = '/'.join((url_constants.SVN_BRANCH_URL, branch, 'src',
                                svn_constants.EXTENSIONS_PATH))

        viewvc_url = svn_url.replace(url_constants.SVN_URL,
                                     url_constants.VIEWVC_URL)

        object_store_creator_factory = ObjectStoreCreator.Factory(
            GetAppVersion(), branch, start_empty=True)

        svn_file_system = CachingFileSystem(
            SubversionFileSystem(AppEngineUrlFetcher(svn_url),
                                 AppEngineUrlFetcher(viewvc_url)),
            object_store_creator_factory)

        return ServerInstance(channel, object_store_creator_factory,
                              svn_file_system,
                              ServerInstance._GetOrCreateGithubFileSystem())
Example No. 6
 def ForLocal():
     channel = 'trunk'
     object_store_creator = ObjectStoreCreator(channel,
                                               start_empty=False,
                                               store_type=TestObjectStore)
     file_system = CachingFileSystem(LocalFileSystem.Create(),
                                     object_store_creator)
     return ServerInstance(
         channel, object_store_creator, file_system, EmptyDirFileSystem(),
         '', CompiledFileSystem.Factory(file_system, object_store_creator))
Example No. 7
    def _CreateCachingFileSystem(self, fs, use_existing_values=False):
        def object_store_constructor(namespace, start_empty=False):
            if namespace not in self._object_store_dbs:
                self._object_store_dbs[namespace] = {}
            db = self._object_store_dbs[namespace]
            if start_empty:
                db.clear()
            return TestObjectStore(namespace, init=db)

        return CachingFileSystem(fs,
                                 ObjectStoreCreator.TestFactory(
                                     store_type=object_store_constructor),
                                 use_existing_values=use_existing_values)
Example No. 8
 def _CreateCachingFileSystem(self, fs, start_empty=False):
   def store_type_constructor(namespace, start_empty=False):
     '''Returns an ObjectStore backed onto test-lifetime-persistent objects
     in |_object_store_dbs|.
     '''
     if namespace not in self._object_store_dbs:
       self._object_store_dbs[namespace] = {}
     db = self._object_store_dbs[namespace]
     if start_empty:
       db.clear()
     return TestObjectStore(namespace, init=db)
   object_store_creator = ObjectStoreCreator(start_empty=start_empty,
                                             store_type=store_type_constructor)
   return CachingFileSystem(fs, object_store_creator)
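A usage sketch only: assuming a unittest.TestCase whose setUp() initializes self._object_store_dbs = {} and a TestFileSystem like the one in Example No. 18 (the test name below is made up; note that across these snapshots ReadSingle returns either the value directly or a Future, depending on the FileSystem revision):

 def testPersistentCacheSketch(self):
   # Hypothetical test method; setUp() is assumed to have initialized
   # self._object_store_dbs = {}, which the helper above requires.
   fs = TestFileSystem({'a.txt': 'contents'})
   caching_fs = self._CreateCachingFileSystem(fs, start_empty=False)
   # In the Future-based revision of this API, Get() resolves the read.
   self.assertEqual('contents', caching_fs.ReadSingle('a.txt').Get())
   # A second instance shares the same backing dicts through
   # self._object_store_dbs, so its caches start out already populated.
   caching_fs_again = self._CreateCachingFileSystem(fs, start_empty=False)
   self.assertEqual('contents', caching_fs_again.ReadSingle('a.txt').Get())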
Example No. 9
 def _CreateServerInstance(self, channel, revision):
     object_store_creator = self._CreateObjectStoreCreator(channel)
     host_file_system = CachingFileSystem(
         self._delegate.CreateHostFileSystemForBranchAndRevision(
             self._GetBranchForChannel(channel), revision),
         object_store_creator)
     app_samples_file_system = self._delegate.CreateAppSamplesFileSystem(
         object_store_creator)
     compiled_host_fs_factory = CompiledFileSystem.Factory(
         host_file_system, object_store_creator)
     return ServerInstance(channel, object_store_creator, host_file_system,
                           app_samples_file_system,
                           '' if channel == 'stable' else '/%s' % channel,
                           compiled_host_fs_factory)
Example No. 10
class ExampleZipperTest(unittest.TestCase):
    def setUp(self):
        object_store_creator = ObjectStoreCreator.ForTest()
        self._file_system = CachingFileSystem(
            LocalFileSystem(os.path.join(sys.path[0], 'test_data')),
            object_store_creator)
        self._example_zipper = ExampleZipper(
            CompiledFileSystem.Factory(self._file_system,
                                       object_store_creator), 'example_zipper')

    def testCreateZip(self):
        # Cache manifest.json as unicode and make sure ExampleZipper doesn't error.
        self._file_system.ReadSingle('example_zipper/basic/manifest.json')
        self.assertTrue(len(self._example_zipper.Create('basic')) > 0)
Example No. 11
 def _Create(self, branch, commit=None):
     '''Creates Gitiles file systems (or, if in a test, potentially whatever
 |self._constructor_for_test| specifies). Wraps the resulting file system in
 an Offline file system if the offline flag is set, and finally wraps it in
 a Caching file system.
 '''
     if self._constructor_for_test is not None:
         file_system = self._constructor_for_test(branch=branch,
                                                  commit=commit)
     else:
         file_system = GitilesFileSystem.Create(branch=branch,
                                                commit=commit)
     if self._offline:
         file_system = OfflineFileSystem(file_system)
     return CachingFileSystem(file_system, self._object_store_creator)
Example No. 12
 def Create(self, branch='trunk', revision=None, offline=None):
     '''Creates either SVN file systems or specialized file systems from the
 constructor passed into this instance. Wraps the resulting file system in
 an Offline file system if the offline flag is set, and finally wraps it in a
 Caching file system.
 '''
     if self._constructor_for_test is not None:
         file_system = self._constructor_for_test(branch=branch,
                                                  revision=revision)
     else:
         file_system = SubversionFileSystem.Create(branch=branch,
                                                   revision=revision)
     if offline or (offline is None and self._offline):
         file_system = OfflineFileSystem(file_system)
     return CachingFileSystem(file_system, self._object_store_creator)
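A minimal usage sketch, assuming factory is an instance of the class defining Create above (the variable names and the branch/revision values here are made up):

 # Hypothetical caller. The per-call offline flag takes precedence; when it
 # is None, the factory-wide self._offline default decides.
 default_fs = factory.Create()  # trunk at HEAD, offline per factory default
 pinned_fs = factory.Create(branch='1025', revision=1234, offline=False)
 # Both results are wrapped in CachingFileSystem, so repeated reads are
 # served from the object store instead of hitting SVN again.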
Example No. 13
    def Create(self, owner, repo):
        '''Creates a GithubFileSystem. For legacy reasons this is hacked
    such that the apps samples repository returns the old GithubFileSystem.

    |owner| is the owner of the GitHub account, e.g. 'GoogleChrome'.
    |repo| is the repository name, e.g. 'devtools-docs'.
    '''
        if owner == 'GoogleChrome' and repo == 'chrome-app-samples':
            # NOTE: The old GitHub file system implementation doesn't support being
            # wrapped by a CachingFileSystem. It's also too slow to run on the dev
            # server, since every app API page would need to read from it.
            return OldGithubFileSystem.CreateChromeAppsSamples(
                self._object_store_creator)
        return CachingFileSystem(
            NewGithubFileSystem.Create(owner, repo,
                                       self._object_store_creator),
            self._object_store_creator)
Example No. 14
    def testListDir(self):
        file_system = CachingFileSystem(_CreateLocalFs(),
                                        ObjectStoreCreator.Factory())
        expected = ['dir/'] + ['file%d.html' % i for i in range(7)]
        file_system._read_object_store.Set(
            'list/', (expected, file_system.Stat('list/').version))
        self.assertEqual(expected, sorted(file_system.ReadSingle('list/')))

        expected.remove('file0.html')
        file_system._read_object_store.Set(
            'list/', (expected, file_system.Stat('list/').version))
        self.assertEqual(expected, sorted(file_system.ReadSingle('list/')))
Example No. 15
 def GetOrCreateOffline(channel):
     '''Gets/creates a local ServerInstance, meaning that only resources local to
 the server (memcache, object store, etc.) are queried. This amounts to not
 setting up the subversion or github file systems.
 '''
     branch_utility = ServerInstance._GetOrCreateBranchUtility()
     branch = branch_utility.GetBranchNumberForChannelName(channel)
     object_store_creator_factory = ObjectStoreCreator.Factory(branch)
     # No svn or github file systems. Rely on the crons to fill the caches, and
     # for the caches to exist.
     return ServerInstance(
         channel,
         object_store_creator_factory,
         CachingFileSystem(OfflineFileSystem(SubversionFileSystem),
                           object_store_creator_factory),
         # TODO(kalman): convert GithubFileSystem to be wrappable in a
         # CachingFileSystem so that it can be replaced with an
         # OfflineFileSystem. Currently GFS doesn't set the child versions of
         # stat requests so it doesn't.
         ServerInstance._GetOrCreateGithubFileSystem())
Example No. 16
    def Create(self, bucket):
        '''Creates a CloudStorageFileSystemProvider.

    |bucket| is the name of the GCS bucket, e.g. devtools-docs. It is expected
             that this bucket has Read permission for this app in its ACLs.

    Optional configuration can be set in a local_debug/gcs_debug.conf file:
      use_local_fs=True|False
      remote_bucket_prefix=<prefix>

    If running in Preview mode or in Development mode with use_local_fs set to
    True, buckets and files are looked for inside the local_debug folder instead
    of in the real GCS server.
    '''
        if IsTest():
            return EmptyDirFileSystem()

        debug_bucket_prefix = None
        use_local_fs = False
        if os.path.exists(LOCAL_GCS_DEBUG_CONF):
            with open(LOCAL_GCS_DEBUG_CONF, "r") as token_file:
                properties = dict(line.strip().split('=', 1)
                                  for line in token_file)
            use_local_fs = properties.get('use_local_fs', 'False') == 'True'
            debug_bucket_prefix = properties.get('remote_bucket_prefix', None)
            logging.debug('gcs: prefixing all bucket names with %s' %
                          debug_bucket_prefix)

        if use_local_fs:
            return LocalFileSystem(
                ToDirectory(os.path.join(LOCAL_GCS_DIR, bucket)))

        if debug_bucket_prefix:
            bucket = debug_bucket_prefix + bucket

        return CachingFileSystem(CloudStorageFileSystem(bucket),
                                 self._object_store_creator)
Example No. 17
class TestGithubFileSystem(unittest.TestCase):
    def setUp(self):
        self._gfs = GithubFileSystem.ForTest('repo',
                                             FakeURLFSFetcher.CreateLocal())
        # Start and finish the repository load.
        self._cgfs = CachingFileSystem(self._gfs, ObjectStoreCreator.ForTest())

    def testReadDirectory(self):
        self._gfs.Refresh().Get()
        self.assertEqual(
            sorted(['requirements.txt', '.gitignore', 'README.md', 'src/']),
            sorted(self._gfs.ReadSingle('').Get()))
        self.assertEqual(sorted(['__init__.notpy', 'hello.notpy']),
                         sorted(self._gfs.ReadSingle('src/').Get()))

    def testReadFile(self):
        self._gfs.Refresh().Get()
        expected = ('# Compiled Python files\n' '*.pyc\n')
        self.assertEqual(expected, self._gfs.ReadSingle('.gitignore').Get())

    def testMultipleReads(self):
        self._gfs.Refresh().Get()
        self.assertEqual(
            self._gfs.ReadSingle('requirements.txt').Get(),
            self._gfs.ReadSingle('requirements.txt').Get())

    def testReads(self):
        self._gfs.Refresh().Get()
        expected = {
            'src/': sorted(['hello.notpy', '__init__.notpy']),
            '': sorted(['requirements.txt', '.gitignore', 'README.md', 'src/'])
        }

        read = self._gfs.Read(['', 'src/']).Get()
        self.assertEqual(expected['src/'], sorted(read['src/']))
        self.assertEqual(expected[''], sorted(read['']))

    def testStat(self):
        self._gfs.Refresh().Get()
        dir_stat = StatInfo(
            FAKE_HASH, {
                'hello.notpy': StatInfo(FAKE_HASH),
                '__init__.notpy': StatInfo(FAKE_HASH)
            })

        self.assertEqual(StatInfo(FAKE_HASH), self._gfs.Stat('README.md'))
        self.assertEqual(StatInfo(FAKE_HASH),
                         self._gfs.Stat('src/hello.notpy'))
        self.assertEqual(dir_stat, self._gfs.Stat('src/'))

    def testBadReads(self):
        self._gfs.Refresh().Get()
        self.assertRaises(FileNotFoundError, self._gfs.Stat, 'DONT_README.md')
        self.assertRaises(FileNotFoundError,
                          self._gfs.ReadSingle('DONT_README.md').Get)

    def testCachingFileSystem(self):
        self._cgfs.Refresh().Get()
        initial_cgfs_read_one = self._cgfs.ReadSingle('src/hello.notpy').Get()

        self.assertEqual(initial_cgfs_read_one,
                         self._gfs.ReadSingle('src/hello.notpy').Get())
        self.assertEqual(initial_cgfs_read_one,
                         self._cgfs.ReadSingle('src/hello.notpy').Get())

        initial_cgfs_read_two = self._cgfs.Read(
            ['README.md', 'requirements.txt']).Get()

        self.assertEqual(
            initial_cgfs_read_two,
            self._gfs.Read(['README.md', 'requirements.txt']).Get())
        self.assertEqual(
            initial_cgfs_read_two,
            self._cgfs.Read(['README.md', 'requirements.txt']).Get())

    def testWithoutRefresh(self):
        self.assertRaises(FileNotFoundError, self._gfs.ReadSingle('src/').Get)

    def testRefresh(self):
        def make_sha_json(hash_value):
            from copy import deepcopy
            commit_json = json.loads(
                deepcopy(
                    LocalFileSystem('').ReadSingle(
                        'test_data/github_file_system/test_owner/repo/commits/HEAD'
                    ).Get()))
            commit_json['commit']['tree']['sha'] = 'hash' + hash_value[4:]

            return json.dumps(commit_json)

        files = {
            'zipfile/': '',
            'zipfile/hello.txt': 'world',
            'zipfile/readme': 'test zip',
            'zipfile/dir/file1': 'contents',
            'zipfile/dir/file2': 'more contents'
        }

        string = _ZipFromFiles(files)

        test_files = {
            'test_owner': {
                'changing-repo': {
                    'commits': {
                        'HEAD': make_sha_json(FAKE_HASH)
                    },
                    'zipball': string
                }
            }
        }

        test_file_system = TestFileSystem(test_files)

        gfs = GithubFileSystem.ForTest(
            'changing-repo',
            FakeURLFSFetcher.Create(test_file_system),
            path='')

        gfs.Refresh().Get()
        initial_dir_read = sorted(gfs.ReadSingle('').Get())
        initial_file_read = gfs.ReadSingle('dir/file1').Get()

        # Change the underlying data.
        files['zipfile/hello.txt'] = 'hello world again'
        files['zipfile/new-file'] = 'a new file'
        files['zipfile/dir/file1'] = 'changed file'
        test_files['test_owner']['changing-repo']['zipball'] = _ZipFromFiles(
            files)
        test_files['test_owner']['changing-repo']['commits']['HEAD'] = (
            make_sha_json(FAKE_HASH + 'hash'))

        # Check that changes have not affected the file system yet.
        self.assertEqual(initial_dir_read, sorted(gfs.ReadSingle('').Get()))
        self.assertEqual(initial_file_read, gfs.ReadSingle('dir/file1').Get())

        gfs.Refresh().Get()

        # Check that the changes have affected the file system.
        self.assertTrue('new-file' in gfs.ReadSingle('').Get())
        self.assertEqual(files['zipfile/dir/file1'],
                         gfs.ReadSingle('dir/file1').Get())
Example No. 18
    def testCaching(self):
        fake_fs = TestFileSystem({
            'bob': {
                'bob0': 'bob/bob0 contents',
                'bob1': 'bob/bob1 contents',
                'bob2': 'bob/bob2 contents',
                'bob3': 'bob/bob3 contents',
            }
        })
        file_system = CachingFileSystem(fake_fs, ObjectStoreCreator.Factory())

        self.assertEqual('bob/bob0 contents',
                         file_system.ReadSingle('bob/bob0'))
        self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))

        # Resource has been cached, so test resource is not re-fetched.
        self.assertEqual('bob/bob0 contents',
                         file_system.ReadSingle('bob/bob0'))
        self.assertTrue(fake_fs.CheckAndReset())

        # Test that if the Stat version is the same, the resource is not re-fetched.
        file_system._stat_object_store.Delete('bob/bob0')
        self.assertEqual('bob/bob0 contents',
                         file_system.ReadSingle('bob/bob0'))
        self.assertTrue(fake_fs.CheckAndReset(stat_count=1))

        # Test that if there is a newer version, the resource is re-fetched.
        file_system._stat_object_store.Delete('bob/bob0')
        fake_fs.IncrementStat()
        self.assertEqual('bob/bob0 contents',
                         file_system.ReadSingle('bob/bob0'))
        self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))

        # Test directory and subdirectory stats are cached.
        file_system._stat_object_store.Delete('bob/bob0')
        file_system._read_object_store.Delete('bob/bob0')
        file_system._stat_object_store.Delete('bob/bob1')
        fake_fs.IncrementStat()
        self.assertEqual('bob/bob1 contents',
                         file_system.ReadSingle('bob/bob1'))
        self.assertEqual('bob/bob0 contents',
                         file_system.ReadSingle('bob/bob0'))
        self.assertTrue(fake_fs.CheckAndReset(read_count=2, stat_count=1))
        self.assertEqual('bob/bob1 contents',
                         file_system.ReadSingle('bob/bob1'))
        self.assertTrue(fake_fs.CheckAndReset())

        # Test a more recent parent directory doesn't force a refetch of children.
        file_system._read_object_store.Delete('bob/bob0')
        file_system._read_object_store.Delete('bob/bob1')
        self.assertEqual('bob/bob1 contents',
                         file_system.ReadSingle('bob/bob1'))
        self.assertEqual('bob/bob2 contents',
                         file_system.ReadSingle('bob/bob2'))
        self.assertEqual('bob/bob3 contents',
                         file_system.ReadSingle('bob/bob3'))
        self.assertTrue(fake_fs.CheckAndReset(read_count=3))
        fake_fs.IncrementStat(path='bob/')
        self.assertEqual('bob/bob1 contents',
                         file_system.ReadSingle('bob/bob1'))
        self.assertEqual('bob/bob2 contents',
                         file_system.ReadSingle('bob/bob2'))
        self.assertEqual('bob/bob3 contents',
                         file_system.ReadSingle('bob/bob3'))
        self.assertTrue(fake_fs.CheckAndReset())

        file_system._stat_object_store.Delete('bob/bob0')
        self.assertEqual('bob/bob0 contents',
                         file_system.ReadSingle('bob/bob0'))
        self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))
        self.assertEqual('bob/bob0 contents',
                         file_system.ReadSingle('bob/bob0'))
        self.assertTrue(fake_fs.CheckAndReset())
Example No. 19
class TestGithubFileSystem(unittest.TestCase):
  def setUp(self):
    self._gfs = GithubFileSystem.ForTest(
        'repo/', partial(FakeURLFSFetcher, LocalFileSystem('')))
    # Start and finish the repository load.
    self._cgfs = CachingFileSystem(self._gfs, ObjectStoreCreator.ForTest())

  def testReadDirectory(self):
    self._gfs.Refresh().Get()
    self.assertEqual(
        sorted(['requirements.txt', '.gitignore', 'README.md', 'src/']),
        sorted(self._gfs.ReadSingle('').Get()))
    self.assertEqual(
        sorted(['__init__.notpy', 'hello.notpy']),
        sorted(self._gfs.ReadSingle('src/').Get()))

  def testReadFile(self):
    self._gfs.Refresh().Get()
    expected = (
      '# Compiled Python files\n'
      '*.pyc\n'
    )
    self.assertEqual(expected, self._gfs.ReadSingle('.gitignore').Get())

  def testMultipleReads(self):
    self._gfs.Refresh().Get()
    self.assertEqual(
        self._gfs.ReadSingle('requirements.txt').Get(),
        self._gfs.ReadSingle('requirements.txt').Get())

  def testReads(self):
    self._gfs.Refresh().Get()
    expected = {
        'src/': sorted(['hello.notpy', '__init__.notpy']),
        '': sorted(['requirements.txt', '.gitignore', 'README.md', 'src/'])
    }

    read = self._gfs.Read(['', 'src/']).Get()
    self.assertEqual(expected['src/'], sorted(read['src/']))
    self.assertEqual(expected[''], sorted(read['']))

  def testStat(self):
    # This is the hash value from the zip on disk.
    real_hash = 'c36fc23688a9ec9e264d3182905dc0151bfff7d7'

    self._gfs.Refresh().Get()
    dir_stat = StatInfo(real_hash, {
      'hello.notpy': StatInfo(real_hash),
      '__init__.notpy': StatInfo(real_hash)
    })

    self.assertEqual(StatInfo(real_hash), self._gfs.Stat('README.md'))
    self.assertEqual(StatInfo(real_hash), self._gfs.Stat('src/hello.notpy'))
    self.assertEqual(dir_stat, self._gfs.Stat('src/'))

  def testBadReads(self):
    self._gfs.Refresh().Get()
    self.assertRaises(FileNotFoundError, self._gfs.Stat, 'DONT_README.md')
    self.assertRaises(FileNotFoundError,
                      self._gfs.ReadSingle('DONT_README.md').Get)

  def testCachingFileSystem(self):
    self._cgfs.Refresh().Get()
    initial_cgfs_read_one = self._cgfs.ReadSingle('src/hello.notpy').Get()

    self.assertEqual(initial_cgfs_read_one,
                     self._gfs.ReadSingle('src/hello.notpy').Get())
    self.assertEqual(initial_cgfs_read_one,
                     self._cgfs.ReadSingle('src/hello.notpy').Get())

    initial_cgfs_read_two = self._cgfs.Read(
        ['README.md', 'requirements.txt']).Get()

    self.assertEqual(
        initial_cgfs_read_two,
        self._gfs.Read(['README.md', 'requirements.txt']).Get())
    self.assertEqual(
        initial_cgfs_read_two,
        self._cgfs.Read(['README.md', 'requirements.txt']).Get())

  def testWithoutRefresh(self):
    # Without refreshing it will still read the content from blobstore, and it
    # does this via the magic of the FakeURLFSFetcher.
    self.assertEqual(['__init__.notpy', 'hello.notpy'],
                     sorted(self._gfs.ReadSingle('src/').Get()))

  def testRefresh(self):
    test_bundle = _TestBundle()
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()

    # It shouldn't fetch until Refresh does so; then it will do two fetches:
    # one for the stat, and another for the read.
    self.assertTrue(*fetcher.CheckAndReset())
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1,
                                           fetch_async_count=1,
                                           fetch_resolve_count=1))

    # Refresh is just an alias for Read('').
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset())

    initial_dir_read = sorted(gfs.ReadSingle('').Get())
    initial_file_read = gfs.ReadSingle('dir/file1').Get()

    version, data = test_bundle.Mutate()

    # Check that changes have not affected the file system yet.
    self.assertEqual(initial_dir_read, sorted(gfs.ReadSingle('').Get()))
    self.assertEqual(initial_file_read, gfs.ReadSingle('dir/file1').Get())
    self.assertNotEqual(StatInfo(version), gfs.Stat(''))

    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    gfs.Refresh().Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1,
                                           fetch_async_count=1,
                                           fetch_resolve_count=1))

    # Check that the changes have affected the file system.
    self.assertEqual(data, gfs.ReadSingle('new-file').Get())
    self.assertEqual(test_bundle.files['zipfile/dir/file1'],
                     gfs.ReadSingle('dir/file1').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('new-file'))

    # Regression test: ensure that reading the data after it's been mutated,
    # but before Refresh() has been realised, still returns the correct data.
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    version, data = test_bundle.Mutate()

    refresh_future = gfs.Refresh()
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1, fetch_async_count=1))

    self.assertEqual(data, gfs.ReadSingle('new-file').Get())
    self.assertEqual(test_bundle.files['zipfile/dir/file1'],
                     gfs.ReadSingle('dir/file1').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('new-file'))

    refresh_future.Get()
    self.assertTrue(*fetcher.CheckAndReset(fetch_resolve_count=1))

  def testGetThenRefreshOnStartup(self):
    # Regression test: Test that calling Get() but never resolving the future,
    # then Refresh()ing the data, causes the data to be refreshed.
    test_bundle = _TestBundle()
    gfs, fetcher = test_bundle.CreateGfsAndFetcher()
    self.assertTrue(*fetcher.CheckAndReset())

    # Get a predictable version.
    version, data = test_bundle.Mutate()

    read_future = gfs.ReadSingle('hello.txt')
    # Fetch for the Stat(), async-fetch for the Read().
    self.assertTrue(*fetcher.CheckAndReset(fetch_count=1, fetch_async_count=1))

    refresh_future = gfs.Refresh()
    self.assertTrue(*fetcher.CheckAndReset())

    self.assertEqual(data, read_future.Get())
    self.assertTrue(*fetcher.CheckAndReset(fetch_resolve_count=1))
    self.assertEqual(StatInfo(version), gfs.Stat('hello.txt'))
    self.assertTrue(*fetcher.CheckAndReset())

    # The fetch will already have been resolved, so resolving the Refresh won't
    # affect anything.
    refresh_future.Get()
    self.assertTrue(*fetcher.CheckAndReset())

    # Read data should not have changed.
    self.assertEqual(data, gfs.ReadSingle('hello.txt').Get())
    self.assertEqual(StatInfo(version), gfs.Stat('hello.txt'))
    self.assertTrue(*fetcher.CheckAndReset())
Example No. 20
    def Create(self, bucket):
        '''Creates a CloudStorageFileSystemProvider.

    |bucket| is the name of the GCS bucket, e.g. devtools-docs. It is expected
             that this bucket has Read permission for this app in its ACLs.

    Optional configuration can be set in a local_debug/gcs_debug.conf file:
      use_local_fs=True|False
      access_token=<token>
      remote_bucket_prefix=<prefix>

    If running in Preview mode or in Development mode with use_local_fs set to
    True, buckets and files are looked for inside the local_debug folder instead
    of in the real GCS server. Preview server does not support direct GCS
    access, so it is always forced to use a LocalFileSystem.

    For real GCS access in the Development mode (dev_appserver.py),
    access_token and remote_bucket_prefix options can be
    used to change the way GCS files are accessed. Both are ignored in a real
    appengine instance.

    "access_token" is always REQUIRED on dev_appserver.py, otherwise you will
    get 404 (auth) errors. You can get one access_token valid for a few minutes
    by typing:
      gsutil -d ls 2>&1 | grep "Bearer" |
         sed "s/.*Bearer \(.*\).r.nUser-Agent.*/access_token=\1/"

    A sample output would be:
      access_token=ya29.1.AADtN_VW5ibbfLHV5cMIK5ss4bHtVzBXpa4byjd

    Now add this line to the local_debug/gcs_debug.conf file and restart the
    appengine development server.

    Remember that you will need a new access_token every ten minutes or
    so. If you get 404 errors in the log, update it. The access token is not
    used for a deployed appengine app, only when you use dev_appserver.py.

    remote_bucket_prefix is useful if you want to test on your own GCS buckets
    before using the real GCS buckets.

    '''
        if not environment.IsReleaseServer() and not environment.IsDevServer():
            bucket_local_path = os.path.join(LOCAL_GCS_DIR, bucket)
            if IsDirectory(bucket_local_path):
                return LocalFileSystem(bucket_local_path)
            else:
                return EmptyDirFileSystem()

        debug_access_token = None
        debug_bucket_prefix = None
        use_local_fs = False

        if environment.IsDevServer() and os.path.exists(LOCAL_GCS_DEBUG_CONF):
            with open(LOCAL_GCS_DEBUG_CONF, "r") as token_file:
                properties = dict(line.strip().split('=', 1)
                                  for line in token_file)
            use_local_fs = properties.get('use_local_fs', 'False') == 'True'
            debug_access_token = properties.get('access_token', None)
            debug_bucket_prefix = properties.get('remote_bucket_prefix', None)

        if environment.IsDevServer() and use_local_fs:
            return LocalFileSystem(os.path.join(LOCAL_GCS_DIR, bucket))

        # gcs_file_system has strong dependencies on runtime appengine APIs,
        # so we only import it when we are sure we are not on preview.py or tests.
        from gcs_file_system import CloudStorageFileSystem
        return CachingFileSystem(
            CloudStorageFileSystem(bucket, debug_access_token,
                                   debug_bucket_prefix),
            self._object_store_creator)
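Putting the docstring's options together, a dev-server local_debug/gcs_debug.conf might look like this (the token and prefix values below are placeholders, not real credentials):

 use_local_fs=False
 access_token=ya29.<paste-your-token-here>
 remote_bucket_prefix=my-test-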
Example No. 21
 def setUp(self):
   self._gfs = GithubFileSystem.ForTest(
       'repo/', partial(FakeURLFSFetcher, LocalFileSystem('')))
   # Start and finish the repository load.
   self._cgfs = CachingFileSystem(self._gfs, ObjectStoreCreator.ForTest())