def _getLog(self, start, limit=None, stop=None, union_repository=None,
            enable_hosting=None, enable_memcache=None, logger=None):
    """Fetch a commit log, preferring memcache, then the hosting service.

    :param start: The commit (e.g. a sha1) to start the log from; also
        part of the cache key.
    :param limit: Maximum number of commits to return, or None.
    :param stop: Commit at which to stop the log, or None.
    :param union_repository: If not None and different from our own
        repository, search the union of both repositories' objects.
    :param enable_hosting: Whether to ask the Git hosting service;
        defaults to the inverse of the code.git.log.disable_hosting flag.
    :param enable_memcache: Whether to consult/populate memcache;
        defaults to the inverse of the code.git.log.disable_memcache flag.
    :param logger: An optional logger for cache-corruption reports.
    :return: A list of dicts describing commits.
    """
    if enable_hosting is None:
        enable_hosting = not getFeatureFlag(
            u"code.git.log.disable_hosting")
    if enable_memcache is None:
        enable_memcache = not getFeatureFlag(
            u"code.git.log.disable_memcache")
    path = self.repository.getInternalPath()
    if (union_repository is not None and
            union_repository != self.repository):
        # "a:b" asks the hosting service to look up objects in the union
        # of repositories a and b.
        path = "%s:%s" % (union_repository.getInternalPath(), path)
    log = None
    if enable_memcache:
        memcache_client = getUtility(IMemcacheClient)
        # Include the instance name in the key so that deployments
        # sharing a memcache cluster do not collide.
        instance_name = urlsplit(
            config.codehosting.internal_git_api_endpoint).hostname
        memcache_key = "%s:git-log:%s:%s" % (instance_name, path, start)
        if limit is not None:
            memcache_key += ":limit=%s" % limit
        if stop is not None:
            memcache_key += ":stop=%s" % stop
        memcache_key = six.ensure_binary(memcache_key)
        cached_log = memcache_client.get(memcache_key)
        if cached_log is not None:
            try:
                log = json.loads(cached_log)
            except Exception:
                # A corrupt cache entry: report it and delete it so the
                # next request refetches from the hosting service.
                if logger is not None:
                    logger.exception(
                        "Cannot load cached log information for %s:%s; "
                        "deleting" % (path, start))
                memcache_client.delete(memcache_key)
    if log is None:
        if enable_hosting:
            hosting_client = getUtility(IGitHostingClient)
            log = removeSecurityProxy(hosting_client.getLog(
                path, start, limit=limit, stop=stop, logger=logger))
            if enable_memcache:
                memcache_client.set(memcache_key, json.dumps(log))
        else:
            # Fall back to synthesising something reasonable based on
            # information in our own database.
            log = [{
                "sha1": self.commit_sha1,
                "message": self.commit_message,
                "author": None if self.author is None else {
                    "name": self.author.name_without_email,
                    "email": self.author.email,
                    "time": seconds_since_epoch(self.author_date),
                    },
                "committer": None if self.committer is None else {
                    "name": self.committer.name_without_email,
                    "email": self.committer.email,
                    "time": seconds_since_epoch(self.committer_date),
                    },
                }]
    return log
def test_setting_one_flag_with_manager(self):
    """The fixture context manager sets a flag, then restores it."""
    flag = self.getUniqueString()
    before = getFeatureFlag(flag)
    during = None
    with FeatureFixture({flag: u'on'}):
        during = getFeatureFlag(flag)
    self.assertEqual(during, u'on')
    self.assertEqual(before, getFeatureFlag(flag))
    self.assertNotEqual(before, during)
def test_fixture_sets_one_flag_and_cleans_up_again(self):
    """setUp applies the flag; cleanUp restores the previous value."""
    flag = self.getUniqueString()
    original_value = getFeatureFlag(flag)
    fixture = FeatureFixture({flag: 'on'})
    fixture.setUp()
    active_value = getFeatureFlag(flag)
    fixture.cleanUp()
    self.assertEqual(active_value, 'on')
    self.assertEqual(original_value, getFeatureFlag(flag))
    self.assertNotEqual(original_value, active_value)
def __init__(self, con):
    """Build the work queue of unreferenced LibraryFileContent rows.

    Populates a temporary table with the ids of LibraryFileContent rows
    that no LibraryFileAlias references, and records the highest queue
    id in `self.max_id` so deletion can proceed in batches.
    """
    # Whether removed content must also be deleted from Swift.
    self.swift_enabled = getFeatureFlag(
        'librarian.swift.enabled') or False
    self.con = con
    self.index = 1
    self.total_deleted = 0
    log.info("Deleting unreferenced LibraryFileContents.")
    cur = con.cursor()
    drop_tables(cur, "UnreferencedLibraryFileContent")
    cur.execute("""
        CREATE TEMPORARY TABLE UnreferencedLibraryFileContent (
            id bigserial PRIMARY KEY,
            content bigint UNIQUE)
        """)
    cur.execute("""
        INSERT INTO UnreferencedLibraryFileContent (content)
        SELECT DISTINCT LibraryFileContent.id
        FROM LibraryFileContent
        LEFT OUTER JOIN LibraryFileAlias
            ON LibraryFileContent.id = LibraryFileAlias.content
        WHERE LibraryFileAlias.content IS NULL
        """)
    cur.execute("""
        SELECT COALESCE(max(id), 0) FROM UnreferencedLibraryFileContent
        """)
    self.max_id = cur.fetchone()[0]
    log.info(
        "%d unreferenced LibraryFileContents to remove." % self.max_id)
def setUpSubWidgets(self):
    """Create the product/distribution/package sub-widgets once."""
    if self._widgets_set_up:
        return
    if bool(getFeatureFlag('disclosure.dsp_picker.enabled')):
        # Replace the default field with a field that uses the better
        # vocabulary.
        package_vocab = 'DistributionSourcePackage'
    else:
        package_vocab = 'BinaryAndSourcePackageName'
    fields = [
        Choice(
            __name__='product', title=u'Project', required=True,
            vocabulary=self.getProductVocabulary()),
        Choice(
            __name__='distribution', title=u"Distribution", required=True,
            vocabulary=self.getDistributionVocabulary(),
            default=getUtility(ILaunchpadCelebrities).ubuntu),
        Choice(
            __name__='package', title=u"Package", required=False,
            vocabulary=package_vocab),
        ]
    # Render the distribution choice as a plain dropdown.
    self.distribution_widget = CustomWidgetFactory(LaunchpadDropdownWidget)
    for field in fields:
        setUpWidget(
            self, field.__name__, field, IInputWidget, prefix=self.name)
    self._widgets_set_up = True
def get_int_feature_flag(flag):
    """Return the integer value of `flag`, or None if unset or invalid."""
    raw_value = getFeatureFlag(flag)
    if raw_value is None:
        return None
    try:
        return int(raw_value)
    except ValueError:
        # Malformed flag values are reported but treated as unset.
        logger.error('invalid %s %r', flag, raw_value)
def yui_version(self):
    """The version of YUI we are using."""
    flag_value = getFeatureFlag('js.yui_version')
    # An unset or empty flag means the default 'yui' directory.
    return flag_value if flag_value else 'yui'
def setUpSubWidgets(self):
    """Create the project/distribution/package sub-widgets once."""
    if self._widgets_set_up:
        return
    if bool(getFeatureFlag("disclosure.dsp_picker.enabled")):
        # Replace the default field with a field that uses the better
        # vocabulary.
        package_vocab = "DistributionSourcePackage"
    else:
        package_vocab = "BinaryAndSourcePackageName"
    fields = [
        Choice(
            __name__="project", title=u"Project", required=True,
            vocabulary="Product"),
        Choice(
            __name__="distribution", title=u"Distribution", required=True,
            vocabulary="Distribution",
            default=getUtility(ILaunchpadCelebrities).ubuntu),
        Choice(
            __name__="package", title=u"Package", required=False,
            vocabulary=package_vocab),
        ]
    if not self._read_only:
        # Only editable forms replace the distribution widget with a
        # plain dropdown.
        self.distribution_widget = CustomWidgetFactory(
            LaunchpadDropdownWidget)
    for field in fields:
        setUpWidget(
            self, field.__name__, field, self._sub_widget_interface,
            prefix=self.name)
    self._widgets_set_up = True
def related_feature_info(self):
    """Related feature flags that are active for this context and scope.

    This property describes all features marked as related_features in
    the view. is_beta means that the value is not the default value.

    Return a dict of flags keyed by flag_name, with title and url as
    given by the flag's description.  Value is the value in the current
    scope, and is_beta is true if this is not the default value.
    """
    # Avoid circular imports.
    from lp.services.features.flags import flag_info
    related = {}
    for record in flag_info:
        (flag_name, value_domain, documentation, default_behavior,
         title, url) = record
        if flag_name not in self.related_features:
            continue
        current_value = getFeatureFlag(flag_name)
        related[flag_name] = {
            'is_beta': defaultFlagValue(flag_name) != current_value,
            'title': title,
            'url': url,
            'value': current_value,
        }
    return related
def _toFieldValue(self, input):
    """Convert a package-name token from the form into a field value.

    With the disclosure.dsp_picker.enabled flag set, the token is looked
    up in the widget's (distribution-scoped) vocabulary; otherwise the
    untrusted BinaryAndSourcePackageName path is used, which requires
    secondary verification against published packages.

    :raises ConversionError: if no source package of that name is known
        in the distribution.
    """
    if not input:
        return self.context.missing_value
    distribution = self.getDistribution()
    # Reuse a previous conversion of the same token if available.
    cached_value = self.cached_values.get(input)
    if cached_value:
        return cached_value
    if bool(getFeatureFlag('disclosure.dsp_picker.enabled')):
        try:
            self.context.vocabulary.setDistribution(distribution)
            return self.context.vocabulary.getTermByToken(input).value
        except LookupError:
            raise ConversionError(
                "Launchpad doesn't know of any source package named"
                " '%s' in %s." % (input, distribution.displayname))
    # Else the untrusted SPN vocab was used so it needs secondary
    # verification.
    try:
        source = distribution.guessPublishedSourcePackageName(input)
    except NotFoundError:
        try:
            source = self.convertTokensToValues([input])[0]
        except InvalidValue:
            raise ConversionError(
                "Launchpad doesn't know of any source package named"
                " '%s' in %s." % (input, distribution.displayname))
    self.cached_values[input] = source
    return source
def run(self):
    """See `IGitRefScanJob`."""
    try:
        # Serialise ref scans per repository with an advisory lock.
        with try_advisory_lock(
                LockType.GIT_REF_SCAN, self.repository.id,
                Store.of(self.repository)):
            hosting_path = self.repository.getInternalPath()
            refs_to_upsert, refs_to_remove = (
                self.repository.planRefChanges(hosting_path, logger=log))
            self.repository.fetchRefCommits(
                hosting_path, refs_to_upsert, logger=log)
            # The webhook delivery includes old ref information, so
            # prepare it before we actually execute the changes.
            if getFeatureFlag('code.git.webhooks.enabled'):
                payload = self.composeWebhookPayload(
                    self.repository, refs_to_upsert, refs_to_remove)
                getUtility(IWebhookSet).trigger(
                    self.repository, 'git:push:0.1', payload)
            self.repository.synchroniseRefs(
                refs_to_upsert, refs_to_remove, logger=log)
            props = getUtility(IGitHostingClient).getProperties(
                hosting_path)
            # We don't want ref canonicalisation, nor do we want to send
            # this change back to the hosting service.
            removeSecurityProxy(self.repository)._default_branch = (
                props["default_branch"])
    except LostObjectError:
        # The repository vanished while we were working on it.
        log.info(
            "Skipping repository %s because it has been deleted."
            % self._cached_repository_name)
def open(self, fileid):
    """Open a library file for streaming.

    Tries Swift first (when the librarian.swift.enabled flag is set),
    falling back to the local on-disk copy.

    NOTE(review): this uses `yield` with `defer.returnValue`, so it
    appears to be a Twisted inlineCallbacks-style generator — confirm
    the decorator at the definition site, which is outside this view.
    """
    if getFeatureFlag('librarian.swift.enabled'):
        # Log our attempt.
        self.swift_download_attempts += 1
        if self.swift_download_attempts % 1000 == 0:
            log.msg('{} Swift download attempts, {} failures'.format(
                self.swift_download_attempts, self.swift_download_fails))
        # First, try and stream the file from Swift.
        container, name = swift.swift_location(fileid)
        swift_connection = swift.connection_pool.get()
        try:
            headers, chunks = yield deferToThread(
                swift.quiet_swiftclient, swift_connection.get_object,
                container, name, resp_chunk_size=self.CHUNK_SIZE)
            swift_stream = TxSwiftStream(swift_connection, chunks)
            defer.returnValue(swift_stream)
        except swiftclient.ClientException as x:
            if x.http_status == 404:
                # Not found is expected before the feed job has run, so
                # return the connection to the pool and don't count it
                # as a failure.
                swift.connection_pool.put(swift_connection)
            else:
                self.swift_download_fails += 1
                log.err(x)
        except Exception as x:
            self.swift_download_fails += 1
            log.err(x)
        # If Swift failed, for any reason, fall through to try and
        # stream the data from disk. In particular, files cannot be
        # found in Swift until librarian-feed-swift.py has put them
        # in there.
    path = self._fileLocation(fileid)
    if os.path.exists(path):
        defer.returnValue(open(path, 'rb'))
def test_user_slice_from_rules(self):
    """Userslice matches against the real request user"""
    person = self.factory.makePerson()
    rules = [
        dict(flag='test_feature', scope='userslice:0,1', priority=999,
             value=u'on'),
        dict(flag='test_not', scope='userslice:1,1', priority=999,
             value=u'not_value'),
    ]
    with FeatureFixture({}, full_feature_rules=rules):
        with person_logged_in(person):
            self.assertEqual('on', getFeatureFlag('test_feature'))
            self.assertEqual(None, getFeatureFlag('test_not'))
def trigger_webhooks(tip_changed):
    """Deliver bzr:push webhooks when the branch tip actually moved."""
    old_revid = tip_changed.old_tip_revision_id
    new_revid = tip_changed.new_tip_revision_id
    # Nothing to do when the feature is off or the tip did not change.
    if not getFeatureFlag("code.bzr.webhooks.enabled"):
        return
    if old_revid == new_revid:
        return
    payload = tip_changed.composeWebhookPayload(
        tip_changed.db_branch, old_revid, new_revid)
    getUtility(IWebhookSet).trigger(
        tip_changed.db_branch, "bzr:push:0.1", payload)
def create_snap(self):
    """Menu link for snap creation.

    Only enabled if the snap_private flag is enabled for private
    contexts.
    """
    if self.context.private:
        enabled = bool(getFeatureFlag(SNAP_PRIVATE_FEATURE_FLAG))
    else:
        enabled = True
    return Link(
        '+new-snap', 'Create snap package', enabled=enabled, icon='add')
def validate(self, data):
    """Validate the form; reject private snaps without the flag."""
    super(SnapAdminView, self).validate(data)
    # BaseSnapEditView.validate checks the rules for 'private' in
    # combination with other attributes.
    wants_private = data.get('private', None) is True
    if wants_private and not getFeatureFlag(SNAP_PRIVATE_FEATURE_FLAG):
        self.setFieldError(
            'private',
            u'You do not have permission to create private snaps.')
def test_user_slice_from_rules(self):
    """Userslice matches against the real request user"""
    person = self.factory.makePerson()
    with FeatureFixture({}, full_feature_rules=[
            dict(
                flag='test_feature', scope='userslice:0,1', priority=999,
                value=u'on'),
            dict(
                flag='test_not', scope='userslice:1,1', priority=999,
                value=u'not_value'),
            ]):
        with person_logged_in(person):
            # assertEquals is a deprecated alias of assertEqual (removed
            # in Python 3.12); use the canonical name.
            self.assertEqual(getFeatureFlag('test_feature'), 'on')
            self.assertEqual(getFeatureFlag('test_not'), None)
def celery_enabled(class_name):
    """Determine whether a given class is configured to run via Celery.

    The name of a BaseRunnableJob must be specified.
    """
    enabled_classes = getFeatureFlag('jobs.celery.enabled_classes')
    # The flag holds a space-separated list of job class names.
    return (enabled_classes is not None
            and class_name in enabled_classes.split(' '))
def initialize(self):
    """See `LaunchpadView`."""
    super(SnapAddView, self).initialize()
    # With the private_snap flag disabled, snap creation is blocked
    # for private contexts.
    if getFeatureFlag(SNAP_PRIVATE_FEATURE_FLAG):
        return
    context_is_private = (
        IInformationType.providedBy(self.context)
        and self.context.information_type in PRIVATE_INFORMATION_TYPES)
    if context_is_private:
        raise SnapPrivateFeatureDisabled
def test_threadGetFlag(self):
    self.populateStore()
    # Mimic the start-of-request handler: install a controller for the
    # relevant scopes...
    controller, call_log = self.makeControllerInScopes(
        ["default", "beta_user"])
    install_feature_controller(controller)
    try:
        # ...after which application code can query flags without
        # holding any context object.
        self.assertEqual(u"4.0", getFeatureFlag("ui.icing"))
    finally:
        install_feature_controller(None)
def merge_proposal_deleted(merge_proposal, event):
    """A merge proposal has been deleted."""
    if not getFeatureFlag(BRANCH_MERGE_PROPOSAL_WEBHOOKS_FEATURE_FLAG):
        return
    # The merge proposal link will be invalid by the time the webhook is
    # delivered, but this may still be useful for endpoints that might
    # e.g. want to cancel CI jobs in flight.
    payload = {
        "action": "deleted",
        "old": _compose_merge_proposal_webhook_payload(merge_proposal),
    }
    _trigger_webhook(merge_proposal, payload)
def __init__(self, field, vocabulary, request):
    """Initialise the widget, priming the DSP vocabulary if enabled."""
    super(SourcePackageNameWidgetBase, self).__init__(
        field, vocabulary, request)
    # Cache of token -> value conversions already performed.
    self.cached_values = {}
    if bool(getFeatureFlag('disclosure.dsp_picker.enabled')):
        # The distribution may change later when we process form input,
        # but setting it here makes it easier to construct some views,
        # particularly edit views where we need to render the context.
        distribution = self.getDistribution()
        if distribution is not None:
            self.context.vocabulary.setDistribution(distribution)
def merge_proposal_created(merge_proposal, event):
    """A new merge proposal has been created.

    Create a job to update the diff for the merge proposal; trigger
    webhooks.
    """
    getUtility(IUpdatePreviewDiffJobSource).create(merge_proposal)
    if not getFeatureFlag(BRANCH_MERGE_PROPOSAL_WEBHOOKS_FEATURE_FLAG):
        return
    _trigger_webhook(merge_proposal, {
        "action": "created",
        "new": _compose_merge_proposal_webhook_payload(merge_proposal),
    })
def _trigger_snap_build_webhook(snapbuild, action):
    """Deliver a snap:build webhook for `snapbuild` when the flag is set."""
    if not getFeatureFlag(SNAP_WEBHOOKS_FEATURE_FLAG):
        return
    payload = {
        "snap_build": canonical_url(snapbuild, force_local_path=True),
        "action": action,
    }
    payload.update(compose_webhook_payload(
        ISnapBuild, snapbuild,
        ["snap", "build_request", "status", "store_upload_status"]))
    getUtility(IWebhookSet).trigger(
        snapbuild.snap, "snap:build:0.1", payload)
def test_threadGetFlag(self):
    self.populateStore()
    # Simulate the start-of-request handler installing a controller.
    controller, call_log = self.makeControllerInScopes(
        ['default', 'beta_user'])
    install_feature_controller(controller)
    try:
        # Application code can then read flags with no context object.
        self.assertEqual(u'4.0', getFeatureFlag('ui.icing'))
    finally:
        install_feature_controller(None)
def canUpgrade(self, action=None):
    """Should the form offer a packages upgrade?"""
    if getFeatureFlag("soyuz.derived_series_upgrade.enabled") is None:
        # The feature is not enabled at all.
        return False
    if self.context.status not in UPGRADABLE_SERIES_STATUSES:
        # A feature freeze precludes blanket updates.
        return False
    if self.getUpgrades().is_empty():
        # There are no simple updates to perform.
        return False
    queue = PackageUploadQueue(self.context, None)
    return check_permission("launchpad.Edit", queue)
def before_traverse(event): "Handle profiling when enabled via the profiling.enabled feature flag." # This event is raised on each step of traversal so needs to be # lightweight and not assume that profiling has not started - but this is # equally well done in _maybe_profile so that function takes care of it. # We have to use this event (or add a new one) because we depend on the # feature flags system being configured and usable, and on the principal # being known. try: if getFeatureFlag('profiling.enabled'): _maybe_profile(event) except DisallowedStore: pass
def __init__(self, registrant, owner, distro_series, name, metadata,
             require_virtualized, date_created):
    """Construct a `LiveFS`."""
    # Creation is gated on the LiveFS feature flag.
    if not getFeatureFlag(LIVEFS_FEATURE_FLAG):
        raise LiveFSFeatureDisabled
    super(LiveFS, self).__init__()
    self.registrant = registrant
    self.owner = owner
    self.distro_series = distro_series
    self.name = name
    self.metadata = metadata
    self.require_virtualized = require_virtualized
    # New filesystems start with a neutral build score.
    self.relative_build_score = 0
    self.date_created = date_created
    # Nothing has modified the row yet, so last-modified == created.
    self.date_last_modified = date_created
def test_get_features_into_oops(self):
    with FeatureFixture({'feature_name': 'value'}):
        with CaptureOops() as capture:
            request = LaunchpadTestRequest()
            self.assertEqual('value', getFeatureFlag('feature_name'))
            # Simulate an oops here.
            globalErrorUtility.raising(None, request=request)
            oops = capture.oopses[0]
            # The OOPS report records which flags and scopes were used.
            self.assertTrue('features.usedScopes' in oops)
            self.assertTrue('features.usedFlags' in oops)
            self.assertEqual(
                u"{'feature_name': u'value'}", oops['features.usedFlags'])
def test_get_features_into_oops(self):
    with FeatureFixture({'feature_name': 'value'}):
        with CaptureOops() as capture:
            request = LaunchpadTestRequest()
            # assertEquals is a deprecated alias of assertEqual (removed
            # in Python 3.12); use the canonical name.
            self.assertEqual(getFeatureFlag('feature_name'), 'value')
            # Simulate an oops here.
            globalErrorUtility.raising(None, request=request)
            oops = capture.oopses[0]
            self.assertTrue('features.usedScopes' in oops)
            self.assertTrue('features.usedFlags' in oops)
            self.assertEqual(
                oops['features.usedFlags'], u"{'feature_name': u'value'}")
def do_one_sourcepackage(distro, source, package_root, importer_handler):
    """Import one source package unless it is skipped or already present."""
    source_data = SourcePackageData(**source)
    skip_key = u"%s/%s/%s" % (
        distro, source_data.package, source_data.version)
    skip_list = getFeatureFlag("soyuz.gina.skip_source_versions")
    # The flag holds a space-separated list of distro/package/version keys.
    if skip_list is not None and skip_key in skip_list.split():
        log.info("Skipping %s %s as requested by feature flag.",
                 source_data.package, source_data.version)
        return
    if importer_handler.preimport_sourcecheck(source_data):
        # Don't bother reading package information if the source package
        # already exists in the database.
        log.info("%s already exists in the archive", source_data.package)
        return
    source_data.process_package(distro, package_root)
    source_data.ensure_complete()
    importer_handler.import_sourcepackage(source_data)
    importer_handler.commit()
def __init__(self, context, request):
    """Set the static packaging information for this series."""
    super(ProductSeriesUbuntuPackagingView, self).__init__(
        context, request)
    self._ubuntu = getUtility(ILaunchpadCelebrities).ubuntu
    self._ubuntu_series = self._ubuntu.currentseries
    try:
        package = self.context.getPackage(self._ubuntu_series)
        use_dsp_picker = bool(
            getFeatureFlag('disclosure.dsp_picker.enabled'))
        spn = package.sourcepackagename
        # The DSP picker wants a DistributionSourcePackage default;
        # the legacy picker wants the bare SourcePackageName.
        self.default_sourcepackagename = (
            self._ubuntu.getSourcePackage(spn) if use_dsp_picker else spn)
    except NotFoundError:
        # The package has never been set.
        self.default_sourcepackagename = None
def _toFieldValue(self, input):
    """See `SourcePackageNameWidgetBase`."""
    source = super(FileBugSourcePackageNameWidget, self)._toFieldValue(
        input)
    if (source is not None and
            not bool(getFeatureFlag('disclosure.dsp_picker.enabled'))):
        # XXX cjwatson 2016-07-25: Convert to a value that the
        # IBug.packagename vocabulary will accept. This is a fiddly
        # hack, but it only needs to survive until we can switch to the
        # DistributionSourcePackage picker across the board.
        bspn_vocab = getVocabularyRegistry().get(
            None, "BinaryAndSourcePackageName")
        bspn = bspn_vocab.getTermByToken(source.name).value
        # Remember the converted value so later lookups are cheap.
        self.cached_values[input] = bspn
        return bspn
    else:
        return source
def merge_proposal_modified(merge_proposal, event): """Notify branch subscribers when merge proposals are updated.""" # Check the user. if event.user is None: return if isinstance(event.user, UnauthenticatedPrincipal): from_person = None else: from_person = IPerson(event.user) old_status = event.object_before_modification.queue_status new_status = merge_proposal.queue_status in_progress_states = (BranchMergeProposalStatus.WORK_IN_PROGRESS, BranchMergeProposalStatus.NEEDS_REVIEW) # If the merge proposal was work in progress and is now needs review, # then we don't want to send out an email as the needs review email will # cover that. if (old_status != BranchMergeProposalStatus.WORK_IN_PROGRESS or new_status not in in_progress_states): # Create a delta of the changes. If there are no changes to report, # then we're done. delta = BranchMergeProposalNoPreviewDiffDelta.construct( event.object_before_modification, merge_proposal) if delta is not None: changes = text_delta(delta, delta.delta_values, delta.new_values, delta.interface) # Now create the job to send the email. getUtility(IMergeProposalUpdatedEmailJobSource).create( merge_proposal, changes, from_person) if getFeatureFlag(BRANCH_MERGE_PROPOSAL_WEBHOOKS_FEATURE_FLAG): payload = { "action": "modified", "old": _compose_merge_proposal_webhook_payload( event.object_before_modification), "new": _compose_merge_proposal_webhook_payload(merge_proposal), } # Some fields may not be in the before-modification snapshot; take # values for these from the new object instead. for field in payload["old"]: if not hasattr(event.object_before_modification, field): payload["old"][field] = payload["new"][field] _trigger_webhook(merge_proposal, payload)
def _get_request_timeout(timeout=None):
    """Get the timeout value in ms for the current request.

    :param timeout: A custom timeout in ms.
    :return None or a time in ms representing the budget to grant the
        request.
    """
    if not getattr(_local, 'enable_timeout', True):
        # Timeouts are disabled for this thread.
        return None
    if timeout is None:
        timeout = config.database.db_statement_timeout
        interaction_extras = get_interaction_extras()
        if (interaction_extras is not None
                and interaction_extras.permit_timeout_from_features):
            # Disable the permit while looking up the flag so that the
            # flag query itself cannot recurse back into this path.
            set_permit_timeout_from_features(False)
            try:
                timeout_str = features.getFeatureFlag('hard_timeout')
            finally:
                set_permit_timeout_from_features(True)
            if timeout_str:
                try:
                    timeout = float(timeout_str)
                except ValueError:
                    logging.error(
                        'invalid hard timeout flag %r', timeout_str)
    return timeout
def test_flags_unset_outside_feature_flags_context(self):
    """get fails when used outside the feature_flags context."""
    with feature_flags():
        set_feature_flag(u'name', u'value')
    # Once the context exits, the flag reads as unset.
    observed = getFeatureFlag('name')
    self.assertIs(None, observed)
def _enabled(self):
    """Return the 'memcache' flag value, defaulting to enabled."""
    flag_value = features.getFeatureFlag('memcache')
    # An unset flag means memcache stays on; otherwise the raw flag
    # value is returned as-is.
    return True if flag_value is None else flag_value
def _verifyDkimOrigin(signed_message):
    """Find a From or Sender address for which there's a DKIM signature.

    :returns: A string email address for the trusted sender, if there is
        one, otherwise None.

    :param signed_message: ISignedMessage
    """
    log = logging.getLogger('mail-authenticate-dkim')
    log.setLevel(logging.DEBUG)
    if getFeatureFlag('mail.dkim_authentication.disabled'):
        log.info('dkim authentication feature disabled')
        return None
    # uncomment this for easier test debugging
    # log.addHandler(logging.FileHandler('/tmp/dkim.log'))
    dkim_log = cStringIO()
    log.info(
        'Attempting DKIM authentication of message id=%r from=%r sender=%r'
        % (signed_message['Message-ID'], signed_message['From'],
           signed_message['Sender']))
    signing_details = []
    dkim_result = False
    try:
        dkim_result = dkim.verify(
            signed_message.parsed_string, dkim_log,
            details=signing_details)
    except dkim.DKIMException as e:
        log.warning('DKIM error: %r' % (e,))
    except dns.resolver.NXDOMAIN as e:
        # This can easily happen just through bad input data, ie claiming to
        # be signed by a domain with no visible key of that name. It's not an
        # operational error.
        log.info('DNS exception: %r' % (e,))
    except dns.exception.DNSException as e:
        # many of them have lame messages, thus %r
        log.warning('DNS exception: %r' % (e,))
    except Exception as e:
        # DKIM leaks some errors when it gets bad input, as in bug 881237. We
        # don't generally want them to cause the mail to be dropped entirely
        # though. It probably is reasonable to treat them as potential
        # operational errors, at least until they're handled properly, by
        # making pydkim itself more defensive.
        log.warning(
            'unexpected error in DKIM verification, treating as unsigned: %r'
            % (e,))
    log.info('DKIM verification result: trusted=%s' % (dkim_result,))
    log.debug('DKIM debug log: %s' % (dkim_log.getvalue(),))
    if not dkim_result:
        return None
    # In addition to the dkim signature being valid, we have to check that
    # it was actually signed by the user's domain.
    if len(signing_details) != 1:
        log.info(
            'expected exactly one DKIM details record: %r'
            % (signing_details,))
        return None
    signing_domain = signing_details[0]['d']
    if not _isDkimDomainTrusted(signing_domain):
        log.info("valid DKIM signature from untrusted domain %s"
                 % (signing_domain,))
        return None
    for origin in ['From', 'Sender']:
        if signed_message[origin] is None:
            continue
        name, addr = parseaddr(signed_message[origin])
        try:
            origin_domain = addr.split('@')[1]
        except IndexError:
            log.warning(
                "couldn't extract domain from address %r",
                signed_message[origin])
            # Bug fix: without this continue, origin_domain would be
            # undefined (NameError) on the first iteration, or stale from
            # a previous iteration, when the address has no '@'.
            continue
        if signing_domain == origin_domain:
            log.info(
                "DKIM signing domain %s matches %s address %r",
                signing_domain, origin, addr)
            return addr
    else:
        log.info("DKIM signing domain %s doesn't match message origin; "
                 "disregarding signature"
                 % (signing_domain))
        return None
def test_fixture_overrides_previously_set_flags(self):
    self.useFixture(FeatureFixture({'one': '1'}))
    self.useFixture(FeatureFixture({'one': '5'}))
    # The most recently installed fixture wins.
    self.assertEqual(u'5', getFeatureFlag('one'))
def test_fixture_deletes_existing_values(self):
    self.useFixture(FeatureFixture({'one': '1'}))
    self.useFixture(FeatureFixture({'two': '2'}))
    # The second fixture replaces the earlier rule set wholesale.
    self.assertEqual(None, getFeatureFlag('one'))
    self.assertEqual(u'2', getFeatureFlag('two'))
def test_fixture_does_not_set_value_for_flags_that_are_None(self):
    # A None value in the fixture dict means "leave this flag unset".
    self.useFixture(FeatureFixture({'nothing': None}))
    self.assertEqual(None, getFeatureFlag('nothing'))
def test_flags_set_within_feature_flags_context(self):
    """In the feature_flags context, set/get works."""
    self.useContext(feature_flags())
    set_feature_flag(u'name', u'value')
    observed = getFeatureFlag('name')
    self.assertEqual('value', observed)
def markdown(self):
    """Render as Markdown when enabled, else as plain text-to-HTML."""
    if not getFeatureFlag("markdown.enabled"):
        return self.text_to_html()
    return format_markdown(self._stringtoformat)
def test_threadGetFlagNoContext(self):
    # With no controller installed we must not crash (workaround for the
    # root cause in bug 631884); flags simply read as None.
    install_feature_controller(None)
    flag_value = getFeatureFlag("ui.icing")
    self.assertEqual(None, flag_value)
def show_whatslaunchpad(self):
    """True if introduction to Launchpad should be displayed.

    Shown when not logged in or if blog is disabled.
    """
    if self.user is None:
        return True
    return not getFeatureFlag("app.root_blog.enabled")