def valid_url(prop,value,report): """ checks url(...) arguments in CSS, ensuring that the contents are officially sanctioned. Sanctioned urls include: * anything in /static/ * image labels %%..%% for images uploaded on /about/stylesheet * urls with domains in g.allowed_css_linked_domains """ try: url = value.getStringValue() except IndexError: g.log.error("Problem validating [%r]" % value) raise # local urls are allowed if local_urls.match(url): t_url = None while url != t_url: t_url, url = url, filters.url_unescape(url) # disallow path trickery if "../" in url: report.append(ValidationError(msgs['broken_url'] % dict(brokenurl = value.cssText), value)) # custom urls are allowed, but need to be transformed into a real path elif custom_img_urls.match(url): name = custom_img_urls.match(url).group(1) # the label -> image number lookup is stored on the subreddit if c.site.images.has_key(name): url = c.site.images[name] if isinstance(url, int): # legacy url, needs to be generated bucket = g.s3_old_thumb_bucket baseurl = "http://%s" % (bucket) if g.s3_media_direct: baseurl = "http://%s/%s" % (s3_direct_url, bucket) url = "%s/%s_%d.png"\ % (baseurl, c.site._fullname, url) url = s3_https_if_secure(url) value._setCssText("url(%s)"%url) else: # unknown image label -> error report.append(ValidationError(msgs['broken_url'] % dict(brokenurl = value.cssText), value)) else: try: u = urlparse(url) valid_scheme = u.scheme and u.scheme in valid_url_schemes valid_domain = strip_www(u.netloc) in g.allowed_css_linked_domains except ValueError: u = False # allowed domains are ok if not (u and valid_scheme and valid_domain): report.append(ValidationError(msgs['broken_url'] % dict(brokenurl = value.cssText), value))
def valid_url(prop, value, report,
              generate_https_urls, enforce_custom_images_only):
    """
    checks url(...) arguments in CSS, ensuring that the contents are
    officially sanctioned.  Sanctioned urls include:
     * anything in /static/
     * image labels %%..%% for images uploaded on /about/stylesheet
     * urls with domains in g.allowed_css_linked_domains
    """
    try:
        url = value.getStringValue()
    except IndexError:
        g.log.error("Problem validating [%r]" % value)
        raise

    # local urls are allowed
    if local_urls.match(url):
        if enforce_custom_images_only:
            report.append(ValidationError(msgs["custom_images_only"], value))
            return

        t_url = None
        while url != t_url:
            t_url, url = url, filters.url_unescape(url)
        # disallow path trickery
        if "../" in url:
            report.append(ValidationError(msgs["broken_url"]
                                          % dict(brokenurl=value.cssText),
                                          value))
    # custom urls are allowed, but need to be transformed into a real path
    elif custom_img_urls.match(url):
        name = custom_img_urls.match(url).group(1)
        # this relies on localcache to not be doing a lot of lookups
        images = ImagesByWikiPage.get_images(c.site, "config/stylesheet")
        if name in images:
            if not generate_https_urls:
                url = images[name]
            else:
                url = s3_direct_https(images[name])
            value._setCssText("url(%s)" % url)
        else:
            # unknown image label -> error
            report.append(ValidationError(msgs["broken_url"]
                                          % dict(brokenurl=value.cssText),
                                          value))
    else:
        if enforce_custom_images_only:
            report.append(ValidationError(msgs["custom_images_only"], value))
            return

        try:
            u = urlparse(url)
            valid_scheme = u.scheme and u.scheme in valid_url_schemes
            valid_domain = u.netloc in g.allowed_css_linked_domains
        except ValueError:
            u = False

        # allowed domains are ok
        if not (u and valid_scheme and valid_domain):
            report.append(ValidationError(msgs["broken_url"]
                                          % dict(brokenurl=value.cssText),
                                          value))
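# local_urls, custom_img_urls, valid_url_schemes and
# g.allowed_css_linked_domains are defined elsewhere in the module/config.
# Below is a rough, standalone sketch of how the three sanctioned categories
# from the docstring could be distinguished; the patterns and values are
# assumptions for illustration, not the real definitions:

import re
from urlparse import urlparse  # urllib.parse.urlparse on Python 3


def classify(url):
    """Mirror valid_url's three branches for a quick, standalone sanity check."""
    local_urls = re.compile(r"\A/static/[a-z./-]+\Z")           # assumed pattern
    custom_img_urls = re.compile(r"\A%%([a-zA-Z0-9\-]+)%%\Z")   # assumed pattern
    valid_url_schemes = ("http", "https")                       # assumed values
    allowed_css_linked_domains = ["example-cdn.invalid"]        # assumed allowlist

    if local_urls.match(url):
        return "local"
    m = custom_img_urls.match(url)
    if m:
        return "custom:" + m.group(1)
    u = urlparse(url)
    if u.scheme in valid_url_schemes and u.netloc in allowed_css_linked_domains:
        return "allowed-domain"
    return "rejected"


assert classify("/static/bg.png") == "local"
assert classify("%%header%%") == "custom:header"
assert classify("http://example-cdn.invalid/a.png") == "allowed-domain"
assert classify("javascript:alert(1)") == "rejected"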
def valid_url(prop, value, report): """ checks url(...) arguments in CSS, ensuring that the contents are officially sanctioned. Sanctioned urls include: * anything in /static/ * image labels %%..%% for images uploaded on /about/stylesheet * urls with domains in g.allowed_css_linked_domains """ try: url = value.getStringValue() except IndexError: g.log.error("Problem validating [%r]" % value) raise # local urls are allowed if local_urls.match(url): t_url = None while url != t_url: t_url, url = url, filters.url_unescape(url) # disallow path trickery if "../" in url: report.append( ValidationError( msgs['broken_url'] % dict(brokenurl=value.cssText), value)) # custom urls are allowed, but need to be transformed into a real path elif custom_img_urls.match(url): name = custom_img_urls.match(url).group(1) # this relies on localcache to not be doing a lot of lookups images = ImagesByWikiPage.get_images(c.site, "config/stylesheet") if name in images: url = s3_https_if_secure(images[name]) value._setCssText("url(%s)" % url) else: # unknown image label -> error report.append( ValidationError( msgs['broken_url'] % dict(brokenurl=value.cssText), value)) else: try: u = urlparse(url) valid_scheme = u.scheme and u.scheme in valid_url_schemes valid_domain = u.netloc in g.allowed_css_linked_domains except ValueError: u = False # allowed domains are ok if not (u and valid_scheme and valid_domain): report.append( ValidationError( msgs['broken_url'] % dict(brokenurl=value.cssText), value))
def valid_url(prop,value,report): """ checks url(...) arguments in CSS, ensuring that the contents are officially sanctioned. Sanctioned urls include: * anything in /static/ * image labels %%..%% for images uploaded on /about/stylesheet * urls with domains in g.allowed_css_linked_domains """ try: url = value.getStringValue() except IndexError: g.log.error("Problem validating [%r]" % value) raise # local urls are allowed if local_urls.match(url): t_url = None while url != t_url: t_url, url = url, filters.url_unescape(url) # disallow path trickery if "../" in url: report.append(ValidationError(msgs['broken_url'] % dict(brokenurl = value.cssText), value)) # custom urls are allowed, but need to be transformed into a real path elif custom_img_urls.match(url): name = custom_img_urls.match(url).group(1) # the label -> image number lookup is stored on the subreddit if c.site.images.has_key(name): num = c.site.images[name] value._setCssText("url(http://%s/%s_%d.png?v=%s)" % (g.s3_thumb_bucket, c.site._fullname, num, randstr(36))) else: # unknown image label -> error report.append(ValidationError(msgs['broken_url'] % dict(brokenurl = value.cssText), value)) # allowed domains are ok elif domain(url) in g.allowed_css_linked_domains: pass else: report.append(ValidationError(msgs['broken_url'] % dict(brokenurl = value.cssText), value))