Example #1
0
def internalXMLGenerator(url, verbose):
    haltOnError = True
    
    linkOK, linkError = checkLink(url, haltOnError, verbose)

    if not linkOK:
        onError(58,linkError)
    else:
        if verbose:
            printInfo1("Link OK")
    
    firstPage = getWebPage(url, verbose)
    
    if firstPage:
        if "svtplay" in url.lower():
            xmlCode = svtPlayXML(firstPage, verbose)
            if xmlCode == "Error":
                onError(66, "Not able to find link with internal XML-generator")
                xmlCode = ""
            else:
                xmlCode = '\n'.join(xmlCode)
            if verbose:
                printInfo1("XML code:")
                print xmlCode
                return xmlCode
        else:
            onError(64, "Not able to run local python XML generator on this address")
            xmlCode = ""
    else:
        onError(59, "Could not download webpage")
        
    if not verbose:
        print "\n"
    return xmlCode
Example #2
0
def _getMetaclassNameToLocalPageMap():
  """Return the metaclass-name -> local-page map, building it lazily.

  The result is cached in the module-level METACLASSNAME_TO_LOCALPAGE_MAP
  global; the metamodel index page is fetched and scanned only on the
  first call.
  """
  global METACLASSNAME_TO_LOCALPAGE_MAP
  if METACLASSNAME_TO_LOCALPAGE_MAP is None:
    pattern = METAMODEL_ROOT_ENTRY_REGEXPR[MODELIO_SIMPLE_VERSION]
    page = getWebPage(METAMODEL_INDEX_URL)
    # each regex match yields (local url, metaclass name)
    METACLASSNAME_TO_LOCALPAGE_MAP = dict(
        (name, local) for (local, name) in re.findall(pattern, page))
  return METACLASSNAME_TO_LOCALPAGE_MAP
Example #3
0
def checkSecondSvtPage(url, verbose):
    secondTag = ""
    
    secondPage = getWebPage(url, verbose)
    
    if verbose:
        printInfo2("Parsing page...")
    else:
        sys.stdout.write(".")
        sys.stdout.flush()

    soup = BeautifulSoup(secondPage)
    items = soup.findAll("embed", attrs={'attr' : True})
    secondTag = items[0]['attr']
    
    if secondTag:
        if verbose:
            printInfo1("Found second tag:")
            print secondTag
            printInfo2("Decoding...")
        else:
            sys.stdout.write(".")
            sys.stdout.flush()
        secondTag = urllib.unquote(secondTag.encode('utf8')).decode('utf8')
        if verbose:
            printInfo1("Decoded tag:")
            print secondTag
        secondTag = secondTag.split('=', 1)[-1]
    else:
        printError("Did not find second tag") 
    
    if verbose:
        printInfo2("Converting to json...")
    else:
        sys.stdout.write(".")
        sys.stdout.flush()

    jsonString = json.loads(secondTag)
    
    if verbose:
        printInfo1("JSON string:")
        print json.dumps(jsonString['video'], sort_keys=True, indent=2)
        printInfo2("Extracting video link...")
    else:
        sys.stdout.write(".")
        sys.stdout.flush()
        
    
    videoLink = jsonString['video']['videoReferences'][0]['url']  
    
    if verbose:
        printInfo1("Found video link:")
        print videoLink
    else:
        sys.stdout.write(".")
        sys.stdout.flush()
        
    videos = checkVideoLink(videoLink, verbose)
    
    if verbose:
        printInfo2("Extracting subtitle link...")
    else:
        sys.stdout.write(".")
        sys.stdout.flush()
        
    if "url" in jsonString['video']['subtitleReferences'][0]:
        subtitleLink = jsonString['video']['subtitleReferences'][0]['url']
        checkSubtitleLink(subtitleLink, verbose)
        if verbose:
            printInfo1("Found subtitle link:")
            print subtitleLink
        else:
            sys.stdout.write(".")
            sys.stdout.flush()
    else:
        if verbose:
            printWarning("No subtitles found")
        else:
            sys.stdout.write(".")
            sys.stdout.flush()
        subtitleLink = ""
    
    if verbose:
        printInfo1("Found videos:")
        for video in videos:
            print video
    else:
        sys.stdout.write(".")
        sys.stdout.flush()
        
    xmlCode = composeXML(videos, subtitleLink, verbose)
    
    return xmlCode