def _CreateContentProvider(self, name, config): supports_templates = config.get('supportsTemplates', False) supports_zip = config.get('supportsZip', False) if 'chromium' in config: chromium_config = config['chromium'] if 'dir' not in chromium_config: logging.error('%s: "chromium" must have a "dir" property' % name) return None file_system = ChrootFileSystem(self._host_file_system, chromium_config['dir']) elif 'github' in config: github_config = config['github'] if 'owner' not in github_config or 'repo' not in github_config: logging.error( '%s: "github" must provide an "owner" and "repo"' % name) return None file_system = self._github_file_system_provider.Create( github_config['owner'], github_config['repo']) if 'dir' in github_config: file_system = ChrootFileSystem(file_system, github_config['dir']) else: logging.error('%s: content provider type "%s" not supported' % (name, type_)) return None return ContentProvider(name, self._compiled_fs_factory, file_system, supports_templates=supports_templates, supports_zip=supports_zip)
def _CreateContentProvider(self, name, config): default_extensions = config.get('defaultExtensions', ()) supports_templates = config.get('supportsTemplates', False) supports_zip = config.get('supportsZip', False) if 'chromium' in config: chromium_config = config['chromium'] if 'dir' not in chromium_config: logging.error('%s: "chromium" must have a "dir" property' % name) return None file_system = ChrootFileSystem(self._host_file_system, chromium_config['dir']) elif 'gitiles' in config: gitiles_config = config['gitiles'] if 'dir' not in gitiles_config: logging.error('%s: "gitiles" must have a "dir" property' % name) return None file_system = ChrootFileSystem(GitilesFileSystem.Create(), gitiles_config['dir']) elif 'gcs' in config: gcs_config = config['gcs'] if 'bucket' not in gcs_config: logging.error('%s: "gcs" must have a "bucket" property' % name) return None bucket = gcs_config['bucket'] if not bucket.startswith('gs://'): logging.error('%s: bucket %s should start with gs://' % (name, bucket)) return None bucket = bucket[len('gs://'):] file_system = self._gcs_file_system_provider.Create(bucket) if 'dir' in gcs_config: file_system = ChrootFileSystem(file_system, gcs_config['dir']) elif 'github' in config: github_config = config['github'] if 'owner' not in github_config or 'repo' not in github_config: logging.error( '%s: "github" must provide an "owner" and "repo"' % name) return None file_system = self._github_file_system_provider.Create( github_config['owner'], github_config['repo']) if 'dir' in github_config: file_system = ChrootFileSystem(file_system, github_config['dir']) else: logging.error('%s: content provider type not supported' % name) return None return ContentProvider(name, self._compiled_fs_factory, file_system, self._object_store_creator, default_extensions=default_extensions, supports_templates=supports_templates, supports_zip=supports_zip)
def _CreateContentProvider(self, name, config): default_extensions = config.get('defaultExtensions', ()) supports_templates = config.get('supportsTemplates', False) supports_zip = config.get('supportsZip', False) if 'chromium' in config: chromium_config = config['chromium'] if 'dir' not in chromium_config: logging.error('%s: "chromium" must have a "dir" property' % name) return None file_system = ChrootFileSystem(self._host_file_system, chromium_config['dir']) # TODO(rockot): Remove this in a future patch. It should not be needed once # the new content_providers.json is committed. elif 'gitiles' in config: chromium_config = config['gitiles'] if 'dir' not in chromium_config: logging.error('%s: "chromium" must have a "dir" property' % name) return None file_system = ChrootFileSystem(self._host_file_system, chromium_config['dir']) elif 'gcs' in config: gcs_config = config['gcs'] if 'bucket' not in gcs_config: logging.error('%s: "gcs" must have a "bucket" property' % name) return None bucket = gcs_config['bucket'] if not bucket.startswith('gs://'): logging.error('%s: bucket %s should start with gs://' % (name, bucket)) return None bucket = bucket[len('gs://'):] file_system = self._gcs_file_system_provider.Create(bucket) if 'dir' in gcs_config: file_system = ChrootFileSystem(file_system, gcs_config['dir']) else: logging.error('%s: content provider type not supported' % name) return None return ContentProvider(name, self._compiled_fs_factory, file_system, self._object_store_creator, default_extensions=default_extensions, supports_templates=supports_templates, supports_zip=supports_zip)
def testRead(self):
  '''Reads resolve to the same results regardless of leading/trailing
  slashes on the chroot root.
  '''
  expected = {
    'moremanifest/usb.html': 'usb.html contents',
    '': ['moremanifest/', 'sockets.html'],
    'moremanifest/': ['csp.html', 'usb.html'],
    'sockets.html': 'sockets.html contents',
  }
  paths = ('moremanifest/usb.html', '', 'moremanifest/', 'sockets.html')
  for prefix in ('', '/'):
    for suffix in ('', '/'):
      root = prefix + 'extensions/manifest' + suffix
      chroot_fs = ChrootFileSystem(self._test_fs, root)
      actual = _SortListValues(chroot_fs.Read(paths).Get())
      self.assertEqual(expected, actual)
def testStat(self):
  '''Stat results seen through the chroot reflect the underlying file
  system's versions, for every slash variation of the chroot root.
  '''
  # Bump some versions so the chroot has non-uniform stats to report.
  self._test_fs.IncrementStat('extensions/manifest/sockets.html', by=2)
  self._test_fs.IncrementStat('extensions/manifest/moremanifest/csp.html')
  expectations = (
    ('', StatInfo('2', child_versions={
      'activeTab.html': '0',
      'alarms.html': '0',
      'manifest/': '2',
    })),
    ('activeTab.html', StatInfo('0')),
    ('manifest/', StatInfo('2', child_versions={
      'moremanifest/': '1',
      'sockets.html': '2',
    })),
    ('manifest/sockets.html', StatInfo('2')),
    ('manifest/moremanifest/', StatInfo('1', child_versions={
      'csp.html': '1',
      'usb.html': '0',
    })),
    ('manifest/moremanifest/csp.html', StatInfo('1')),
    ('manifest/moremanifest/usb.html', StatInfo('0')),
  )
  for prefix in ('', '/'):
    for suffix in ('', '/'):
      chroot_fs = ChrootFileSystem(self._test_fs,
                                   prefix + 'extensions' + suffix)
      for path, expected in expectations:
        self.assertEqual(expected, chroot_fs.Stat(path))
def testEmptyRoot(self):
  '''A chroot with an empty root behaves like the wrapped file system.'''
  chroot_fs = ChrootFileSystem(self._test_fs, '')
  content = chroot_fs.ReadSingle('404.html').Get()
  self.assertEqual('404.html contents', content)
def testIdentity(self):
  '''GetIdentity() distinguishes chroots by root but is stable for the
  same root over the same file system.
  '''
  fs_a = ChrootFileSystem(self._test_fs, '1')
  fs_a_again = ChrootFileSystem(self._test_fs, '1')
  fs_b = ChrootFileSystem(self._test_fs, '2')
  base_id = self._test_fs.GetIdentity()
  # Chrooting changes identity relative to the wrapped file system.
  self.assertNotEqual(base_id, fs_a.GetIdentity())
  self.assertNotEqual(base_id, fs_b.GetIdentity())
  # Distinct roots yield distinct identities.
  self.assertNotEqual(fs_a.GetIdentity(), fs_b.GetIdentity())
  # Equal roots yield equal identities.
  self.assertEqual(fs_a.GetIdentity(), fs_a_again.GetIdentity())
def testCronAndPublicFiles(self):
  '''Runs cron then requests every public file. Cron needs to be run first
  because the public file requests are offline.
  '''
  # When specific test files were requested on the command line, skip this
  # whole end-to-end pass.
  if _EXPLICIT_TEST_FILES is not None:
    return

  print('Running cron...')
  start_time = time.time()
  try:
    response = Handler(Request.ForTest('/_cron')).Get()
    self.assertEqual(200, response.status)
    self.assertEqual('Success', response.content.ToString())
  finally:
    # Always report timing, even when the cron request fails.
    print('Took %s seconds' % (time.time() - start_time))

  print("Checking for broken links...")
  start_time = time.time()
  link_error_detector = LinkErrorDetector(
      # TODO(kalman): Use of ChrootFileSystem here indicates a hack. Fix.
      ChrootFileSystem(LocalFileSystem.Create(), EXTENSIONS),
      lambda path: Handler(Request.ForTest(path)).Get(),
      'templates/public',
      ('extensions/index.html', 'apps/about_apps.html'))

  broken_links = link_error_detector.GetBrokenLinks()
  if broken_links and _VERBOSE:
    print('The broken links are:')
    print(StringifyBrokenLinks(broken_links))

  broken_links_set = set(broken_links)

  # Diff the detected broken links against the checked-in baseline.
  known_broken_links_path = os.path.join(
      sys.path[0], 'known_broken_links.json')
  try:
    with open(known_broken_links_path, 'r') as f:
      # The JSON file converts tuples and sets into lists, and for this
      # set union/difference logic they need to be converted back.
      known_broken_links = set(tuple(item) for item in json.load(f))
  except IOError:
    # No baseline file: treat every broken link as new.
    known_broken_links = set()

  newly_broken_links = broken_links_set - known_broken_links
  fixed_links = known_broken_links - broken_links_set

  if _REBASE:
    # --rebase rewrites the baseline with the currently-broken links.
    print('Rebasing broken links with %s newly broken and %s fixed links.' %
          (len(newly_broken_links), len(fixed_links)))
    with open(known_broken_links_path, 'w') as f:
      json.dump(broken_links, f,
                indent=2, separators=(',', ': '), sort_keys=True)
  else:
    if fixed_links or newly_broken_links:
      print('Found %s broken links, and some have changed. '
            'If this is acceptable or expected then run %s with the --rebase '
            'option.' % (len(broken_links), os.path.split(__file__)[-1]))
    elif broken_links:
      print('Found %s broken links, but there were no changes.' %
            len(broken_links))
    if fixed_links:
      print('%s broken links have been fixed:' % len(fixed_links))
      print(StringifyBrokenLinks(fixed_links))
    if newly_broken_links:
      print('There are %s new broken links:' % len(newly_broken_links))
      print(StringifyBrokenLinks(newly_broken_links))
      # NOTE(review): indentation was lost in this copy; the fail() appears
      # to apply only when there are newly broken links — confirm upstream.
      self.fail('See logging for details.')

  print('Took %s seconds.' % (time.time() - start_time))

  print('Searching for orphaned pages...')
  start_time = time.time()
  orphaned_pages = link_error_detector.GetOrphanedPages()
  if orphaned_pages:
    # TODO(jshumway): Test should fail when orphaned pages are detected.
    print('Warning: Found %d orphaned pages:' % len(orphaned_pages))
    for page in orphaned_pages:
      print(page)
  print('Took %s seconds.' % (time.time() - start_time))

  public_files = _GetPublicFiles()

  print('Rendering %s public files...' % len(public_files.keys()))
  start_time = time.time()
  try:
    for path, content in public_files.iteritems():
      # redirects.json files are configuration, not renderable pages.
      if path.endswith('redirects.json'):
        continue

      def check_result(response):
        self.assertEqual(200, response.status,
                         'Got %s when rendering %s' % (response.status, path))
        # This is reaaaaally rough since usually these will be tiny templates
        # that render large files. At least it'll catch zero-length responses.
        self.assertTrue(len(response.content) >= len(content),
                        'Content was "%s" when rendering %s' %
                        (response.content, path))

      check_result(Handler(Request.ForTest(path)).Get())

      # Make sure that leaving out the .html will temporarily redirect to the
      # path with the .html.
      if path.startswith(('apps/', 'extensions/')):
        redirect_result = Handler(
            Request.ForTest(posixpath.splitext(path)[0])).Get()
        self.assertEqual((path, False), redirect_result.GetRedirect())

      # Make sure including a channel will permanently redirect to the same
      # path without a channel.
      for channel in BranchUtility.GetAllChannelNames():
        redirect_result = Handler(
            Request.ForTest('%s/%s' % (channel, path))).Get()
        self.assertEqual((path, True), redirect_result.GetRedirect())

      # Samples are internationalized, test some locales.
      if path.endswith('/samples.html'):
        for lang in ['en-US', 'es', 'ar']:
          check_result(Handler(Request.ForTest(
              path, headers={'Accept-Language': '%s;q=0.8' % lang})).Get())
  finally:
    print('Took %s seconds' % (time.time() - start_time))