def clean_biography(self):
    """Validate the biography form field, rejecting content with links.

    Returns the original (un-normalized) biography value on success.
    """
    biography = self.cleaned_data['biography']
    # six.text_type instead of the py2-only `unicode` builtin, matching
    # the six-based cleaner elsewhere in this file.
    normalized = clean_nl(six.text_type(biography))
    if has_links(normalized):
        # There's some links, we don't want them.
        raise forms.ValidationError(ugettext('No links are allowed.'))
    return biography
def clean_bio(self):
    """Validate the bio form field, rejecting content with links.

    Returns the original (un-normalized) bio value on success.
    """
    bio = self.cleaned_data['bio']
    # six.text_type instead of the py2-only `unicode` builtin, matching
    # the six-based cleaner elsewhere in this file.
    normalized = clean_nl(six.text_type(bio))
    if has_links(normalized):
        # There's some links, we don't want them.
        raise forms.ValidationError(_('No links are allowed.'))
    return bio
def clean_description(self):
    """Reject the description form field when it contains any links."""
    description = self.cleaned_data['description']
    # Normalize newlines first, then refuse the value outright if any
    # link is detected in it.
    if has_links(clean_nl(description)):
        raise forms.ValidationError(ugettext('No links are allowed.'))
    return description
def clean_biography(self):
    """Ensure the submitted biography contains no links."""
    value = self.cleaned_data['biography']
    # Coerce to text (six keeps this py2/py3 compatible) and normalize
    # newlines before scanning for links.
    if has_links(clean_nl(six.text_type(value))):
        # Links are forbidden in biographies.
        raise forms.ValidationError(ugettext('No links are allowed.'))
    return value
def no_links(string):
    """Leave text links untouched, keep only inner text on URLs."""
    if not string:
        return string
    # Unwrap markup-aware objects to their raw HTML first.
    if hasattr(string, '__html__'):
        string = string.__html__()
    # Drop anchor tags (both cases), keeping their inner text.
    stripped = remove_tags(string, 'a A')
    return jinja2.Markup(clean_nl(stripped).strip())
def no_links(string):
    """Leave text links untouched, keep only inner text on URLs."""
    if not string:
        return string
    # Unwrap markup-aware objects to their raw HTML first.
    if hasattr(string, '__html__'):
        string = string.__html__()
    # Allow every bleach-default tag except anchors; strip=True removes
    # disallowed tags instead of escaping them, so only the link's inner
    # text survives.
    tags = [tag for tag in bleach.ALLOWED_TAGS if tag != 'a']
    text_only = bleach.clean(string, tags=tags, strip=True)
    return jinja2.Markup(clean_nl(text_only).strip())
def clean(string, strip_all_html=False):
    """Clean html with bleach.

    :param string string: The original string to clean.
    :param bool strip_all_html: If given, remove all html code from `string`.
    """
    # Edgecase for PurifiedTranslation to avoid already-escaped html code
    # to slip through. This isn't a problem if `strip_all_html` is `False`.
    if isinstance(string, PurifiedTranslation) and strip_all_html:
        string = string.localized_string
    if hasattr(string, '__html__'):
        string = string.__html__()
    # six.text_type instead of the py2-only `unicode` builtin, matching
    # the other helpers in this file.
    if strip_all_html:
        # tags=[] + strip=True removes every tag, keeping only the text.
        string = bleach.clean(six.text_type(string), tags=[], strip=True)
    else:
        string = bleach.clean(six.text_type(string))
    return jinja2.Markup(clean_nl(string).strip())
def clean(string):
    """Bleach-sanitize `string` and return it as safe jinja2 Markup.

    Newlines are normalized via `clean_nl` and surrounding whitespace is
    stripped.
    """
    # six.text_type instead of the py2-only `unicode` builtin, matching
    # the other helpers in this file.
    return jinja2.Markup(clean_nl(bleach.clean(six.text_type(string))).strip())
def validate_biography(self, value):
    """Serializer field validator: biographies must not contain links."""
    # six.text_type instead of the py2-only `unicode` builtin, matching
    # the other validators in this file.
    if has_links(clean_nl(six.text_type(value))):
        # There's some links, we don't want them.
        raise serializers.ValidationError(
            ugettext(u'No links are allowed.'))
    return value
def clean(self):
    """Run the base cleaning, then store a newline-normalized clean string."""
    # NOTE(review): imported at call time rather than module level —
    # presumably to avoid a circular import; confirm before moving it.
    from olympia.amo.utils import clean_nl
    super(PurifiedTranslation, self).clean()
    self.localized_string_clean = clean_nl(self.clean_localized_string()).strip()
def clean_html(self, string):
    """Remove all HTML tags from `string` and normalize its newlines."""
    # tags=[] + strip=True makes bleach drop every tag, keeping text only.
    stripped = bleach.clean(str(string), tags=[], strip=True)
    return clean_nl(stripped).strip()
def validate_description(self, value):
    """Serializer field validator: descriptions must not contain links."""
    normalized = clean_nl(str(value))
    if has_links(normalized):
        # Links are forbidden in descriptions.
        raise serializers.ValidationError(
            ugettext('No links are allowed.'))
    return value