def finished_postprocess(self, err, errmsg):
    """Handle the downloaded page after post-processing.

    If err is non-zero, abort the request with (err, errmsg).
    Otherwise the downloaded body is optionally converted from JSON to
    XML, transformed with the request's XSLT template, and the result
    is dispatched according to the request type: MENU writes the result
    back, STREAMURL/FILE extract media URLs and continue with sending
    or downloading, anything else just finishes the request.
    """
    if err != 0:
        self.request_done(err, errmsg)
        return

    url = self.srcurl

    # Add the input document URL to the XSLT parameters
    params = self.xsltparameters.copy()
    params['docurl'] = utils.xpath_str(url)
    minpriority = self.processing.get('minquality', 0)
    maxpriority = self.processing.get('maxquality', 100)
    templatefile = os.path.join(template_path, self.xsltfile)
    src = self.dl.get_body()
    encoding = self.dl.get_encoding()

    # Optional JSON -> XML conversion before the XSLT stage.
    # ('x in d' replaces the deprecated d.has_key(x), same semantics.)
    if 'postprocess' in self.processing and \
           'json2xml' in self.processing['postprocess']:
        xmldoc = json2xml.json2xml(src, encoding)
        if xmldoc is None:
            self.request_done(503, 'Invalid JSON content')
            return
        src = xmldoc.serialize('utf-8')
        encoding = 'utf-8'

    resulttree = utils.apply_xslt(src, encoding, url, templatefile, params)
    if resulttree is None:
        self.request_done(503, 'XSLT transformation failed')
        return

    if self.type == WebviRequestType.MENU:
        debug("result:")
        debug(resulttree)
        self.writewrapper(resulttree)
        self.request_done(0, None)
    elif self.type == WebviRequestType.STREAMURL:
        self.mediaurls = self.parse_mediaurl(resulttree, minpriority,
                                             maxpriority)
        if self.mediaurls:
            self.check_and_send_url()
        else:
            self.request_done(406, 'No valid URLs found')
    elif self.type == WebviRequestType.FILE:
        self.mediaurls = self.parse_mediaurl(resulttree, minpriority,
                                             maxpriority)
        if self.mediaurls:
            self.start_download()
        else:
            self.request_done(406, 'No valid URLs found')
    else:
        self.request_done(0, None)
def parse_reference(reference):
    """Parses URLs of the following form:

    wvt:///youtube/video.xsl?srcurl=http%3A%2F%2Fwww.youtube.com%2F&param=name1,value1&param=name2,value2

    reference is assumed to be a URL-encoded UTF-8 string.

    Returns (template, srcurl, params, processing_instructions) where
    template is the URL path name (the part before ?), srcurl is the
    parameter called srcurl, and params is a dictionary of
    (name, quoted-value) pairs extracted from param parameters.
    Parameter values are quoted so that the xslt parser handles them as
    strings. processing_instructions is a dictionary of options that
    affect the further processing of the data.

    Returns (None, None, None, None) if reference is not an encodable
    wvt:/// URL.
    """
    # The reference must be representable as a byte string (Python 2 str).
    try:
        reference = str(reference)
    except UnicodeEncodeError:
        return (None, None, None, None)

    if not reference.startswith('wvt:///'):
        return (None, None, None, None)

    ref = reference[len('wvt:///'):]
    srcurl = ''
    parameters = {}
    substitutions = {}
    refsettings = {'HTTP-headers': {}}

    fields = ref.split('?', 1)
    template = fields[0]
    if len(fields) == 1:
        # No query part: only the template name was given.
        return (template, srcurl, parameters, refsettings)

    for par in fields[1].split('&'):
        paramfields = par.split('=', 1)
        # Hoisted: lower-case the key once instead of in every elif test.
        key = paramfields[0].lower()
        if len(paramfields) == 2:
            value = urllib.unquote(paramfields[1])
        else:
            value = ''

        if key == 'srcurl':
            srcurl = value
        elif key == 'param':
            # param=name,value (value optional); names are
            # case-insensitive, values are XPath-quoted.
            fields2 = value.split(',', 1)
            pname = fields2[0].lower()
            if len(fields2) == 2:
                pvalue = fields2[1]
            else:
                pvalue = ''
            parameters[pname] = utils.xpath_str(pvalue)
        elif key == 'subst':
            # subst=placeholder,replacement applied to srcurl below
            substfields = value.split(',', 1)
            if len(substfields) == 2:
                substitutions[substfields[0]] = substfields[1]
        elif key == 'minquality':
            try:
                refsettings['minquality'] = int(value)
            except ValueError:
                pass  # ignore non-numeric quality limits
        elif key == 'maxquality':
            try:
                refsettings['maxquality'] = int(value)
            except ValueError:
                pass  # ignore non-numeric quality limits
        elif key == 'postprocess':
            refsettings.setdefault('postprocess', []).append(value)
        elif key == 'contenttype':
            refsettings['overridecontenttype'] = value
        elif key == 'http-header':
            # http-header=name,data; entries without a comma are skipped
            try:
                headername, headerdata = value.split(',', 1)
            except ValueError:
                continue
            refsettings['HTTP-headers'][headername] = headerdata

    if substitutions:
        srcurl = brace_substitution(srcurl, substitutions)

    return (template, srcurl, parameters, refsettings)