def soupIt(currentUrl, selector, gameType, loginRequired = False):
    """Fetch a page and return the matching BeautifulSoup tags.

    currentUrl    -- URL fetched via gethtml.get
    selector      -- 'input' collects <input> (and <href>) tags; any other
                     value collects tags whose href attribute matches gameType
    gameType      -- compiled regex (or None) applied to href attributes
    loginRequired -- when True, fetch using the session cookie file

    Returns the list of found tag objects.
    """
    if __dbg__:
        if gameType is not None:
            # Was `gameType != empty` -- `empty` is not a defined name here;
            # the class-based variant of this function tests `is not None`.
            print("hockeystreams: enter soupIt url %s selector %s gameType %s" % (
                currentUrl, selector, gameType.pattern))
        else:
            print("hockeystreams: enter soupIt  url %s selector %s gameType %s" % (
                currentUrl, selector, "empty"))
    # Only pass the cookie path for pages that need an authenticated session.
    if loginRequired:
        html = gethtml.get(currentUrl, cookiepath)
    else:
        html = gethtml.get(currentUrl)

    if __dbg__ and super_verbose_logging:
        print("hockeystreams: \t\tfetch browser result %s " % html)

    if __dbg__:
        print("hockeystreams: \t\t soupIt %s " % html)
    soup = BeautifulSoup(''.join(html))

    if selector == 'input':
        found = soup.findAll('input')
        # Was findAll() with no arguments, which appended EVERY tag in the
        # document; restrict to <href> tags, matching the class-based variant.
        found.extend(soup.findAll('href'))
    else:
        found = soup.findAll(attrs={'href': gameType})
    print("hockeystreams: soupit: found count %d" % len(found))
    return found
    def soupIt(self, currentUrl, selector, gameType, loginRequired = False):
        """Fetch currentUrl and return the matching BeautifulSoup tags.

        selector      -- 'input' collects <input> and <href> tags; any other
                         value collects tags whose href matches gameType
        gameType      -- compiled regex (or None) applied to href attributes
        loginRequired -- fetch using the session cookie file; a None result
                         is treated as an expired session: settings are
                         reopened, login() runs, and the call retries itself.
                         NOTE(review): the retry recursion has no limit -- a
                         login that keeps failing would recurse indefinitely.
        """
        if self.__dbg__:
            if gameType is not None:
                print ("hockeystreams: enter soupIt url %s selector %s gameType %s" % (
                currentUrl, selector, gameType.pattern))
            else:
                print (
                "hockeystreams: enter soupIt  url %s selector %s gameType %s" % (currentUrl, selector, "empty"))
        if loginRequired:
            try:
                html = gethtml.get(currentUrl, cookiepath = self.cookiepath, debug = self.__dbg__)
                if html is None:
                    # A None fetch while logged in means the session is gone.
                    raise IndexError
            except IndexError:
                self.__settings__.openSettings()
                self.login()
                return self.soupIt(currentUrl, selector, gameType, loginRequired)
        else:
            html = gethtml.get(currentUrl, debug = self.__dbg__)

        if self.__dbg__:            print ("hockeystreams: \t\tfetch browser result %s " % html)
        if self.__dbg__:            print ("hockeystreams: \t\t soupIt %s " % html)
        soup = BeautifulSoup(''.join(html))

        if selector == 'input':
            found = soup.findAll('input')
            found.extend(soup.findAll('href'))
        else:
            found = soup.findAll(attrs={'href': gameType})
        del selector
        print "hockeystreams: soupit: found count " + str(len(found))
        return found
Example #3
0
def sp(URL, TN, threadNum):
    """Scan one URL in a worker thread: validate it, extract its links,
    record it as visited, and hand the links to startt for further scanning.

    URL       -- page to scan
    TN        -- opaque value forwarded to checka (see caller)
    threadNum -- thread-count bookkeeping value forwarded to startt

    Exits the thread (via exit()) on validation failure, duplicate URL,
    or fetch/parse error.
    """
    # Validate the URL and skip ones that were already scanned.
    a = checka(URL, TN)
    if a == "error0":
        print("Url %s is not a xxx,Exiting thread..." % URL)
        minThreadNumber()
        exit()
    elif a == "error1":
        print("Url %s was scanned,Exiting thread..." % URL)
        minThreadNumber()
        exit()
    else:
        # checka returns the (possibly normalized) URL on success.
        URL = a
    temp2 = checkhash(URL)
    if temp2 == "non":
        print("Url %s was scanned,Exiting thread..." % URL)
        minThreadNumber()
        exit()
    else:
        try:
            print("Now start thread: %s" % URL)
            one = gethtml.get(URL)  # fetch the raw page body
            # Extract every href="..." / href='...' target from the page.
            two = getre.get(r'(?<=href=\").+?(?=\")|(?<=href=\').+?(?=\')',
                            one.decode("utf-8"))
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; fetch/decode errors are what we expect here.
            print("Some error in %s,exiting,," % a)
            minThreadNumber()
            exit()
        # Record the URL as visited; `with` guarantees the file is closed.
        with open("txts/URLs.txt", "at") as tempf:
            tempf.write("%s\n" % URL)
        startt(two, URL, threadNum)
Example #4
0
def get_html(url, cookie=None, user_agent=None, referer=None):
    """Fetch url through the shared gethtml helper, forwarding the optional
    cookie, user agent and referer, and return the body as text on Python 3."""
    body = gethtml.get(url,
                       __datapath__,
                       cookie=cookie,
                       user_agent=user_agent,
                       referer=referer)
    if six.PY3:
        return six.ensure_text(body)
    return body
def doLogin(cookiepath, username, password, debug = False):
    #check if user has supplied only a folder path, or a full path
    if not os.path.isfile(cookiepath):
        #if the user supplied only a folder path, append on to the end of the path a filename.
        cookiepath = os.path.join(cookiepath,'cookies.lwp')
        
    #delete any old version of the cookie file
    try:
        os.remove(cookiepath)
    except:
        pass

    if username and password:
        #the url you will request to.
        login_url = 'http://www5.hockeystreams.com/verify/login'

        #the header used to pretend you are a browser
        header_string = 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3'

	    #build the form data necessary for the login
        login_data = urllib.urlencode({'username':username, 'password':password,'submit':'Sign In'})#, 'memento':1, 'x':0, 'y':0, 'do':'login'})

        #build the request we will make
        req = urllib2.Request(login_url, login_data)
        req.add_header('User-Agent',header_string)

        #initiate the cookielib class
        cj = cookielib.LWPCookieJar()

        #install cookielib into the url opener, so that cookies are handled
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))

        #do the login and get the response
        response = opener.open(req)
        if debug:
            print str(response)
            source = response.read()
            print source
        response.close()

        cj.save(xbmc.translatePath(cookiepath))
        if debug:
            print "cookies!" + str(cj._cookies)
        #check the received html for a string that will tell us if the user is logged in
        #pass the username, which can be used to do this.
        url = "http://www.hockeystreams.com"
        page = gethtml.get(url, cj = cj)
        if debug:
            print page
            print "nidex + " + str(page.find('SIGN OUT')) + "/" + str(len(page))
        login = check_login(page, username)
        #if login suceeded, save the cookiejar to disk
#        if not login:
#            os.remove(cookiepath)
        #return whether we are logged in or not
        return login
    else:
        return False
 def QUALITY(self, url, gamename):
     """Parse a live-stream master m3u8 and add one listing per quality.

     Adds the raw m3u8 url first, then reads the playlist body: after the
     #EXTM3U header, lines alternate between a bandwidth/stream-info line
     and the variant url, so the loop flips `bandwidthLine` every
     iteration, labelling each entry with its bandwidth in kbit/s before
     adding the url line that follows it.
     """
     if self.__dbg__: print "hockeystreams: enter quality"
     mode = 2001
     real_date = str(self.today) # TODO
     # add the m3u8 first
     gameName = "Live " + gamename
     self.util.addLink(gameName, gameName, real_date, url, '', 1, mode)
     # then add the composed feeds
     if self.__dbg__: print "hockeystreams reading " + url
     m3u8Outer = gethtml.get(url).splitlines()
     if "#EXTM3U" in m3u8Outer[0]:#.readline():
         bandwidthLine = True
         loops = m3u8Outer
         for line in loops[1:]:
             line = line.strip()
             if bandwidthLine:
                 # NOTE(review): re.search returns None when a line does not
                 # match bandwidthMatch -- .group(1) would then raise.
                 bandwidth = re.search(self.bandwidthMatch, line).group(1)
                 gameName = str(int(bandwidth)/1000) + self.util.__settings__.getLocalizedString(40500) + " " + gamename
             else:
                 url = line
                 self.util.addLink(gameName, gameName, real_date, url, '', 1, mode)
             bandwidthLine = not bandwidthLine
Example #7
0
def get_html(url):
    """Return the page body for url, fetched via the shared gethtml helper."""
    page = gethtml.get(url, __datapath__)
    return page
Example #8
0
def get_html(url, cookie=None, user_agent=None):
    """Retrieve the page at url via gethtml, forwarding the optional
    cookie and user-agent values."""
    result = gethtml.get(url, __datapath__,
                         cookie=cookie,
                         user_agent=user_agent)
    return result
Example #9
0
# --- add-on script-level globals (Kodi/XBMC) ---
Ventana = xbmcgui.Window()


# Context menu (under development)
MENUDIALOGO = xbmcgui.Dialog()
MENUCONTEXTUAL = xbmcgui.ListItem()
ACTION_CONTEXT_MENU = 100

dialogo = xbmcgui.Dialog()
# NOTE(review): getAddonInfo('Path') returns the add-on folder, so this
# "cookiepath" is a directory, not a cookie file -- confirm the consumer
# appends a file name.
cookiepath = __settings__.getAddonInfo('Path')
use_account = __settings__.getSetting('use-account')
username = __settings__.getSetting('username')
password = __settings__.getSetting('password')
version = xbmcaddon.Addon().getAddonInfo('version')
# Forum login page fetched eagerly at import time.
url = "http://xbmcspain.com/foro/ucp.php?mode=login&redirect=.%2Findex.php"
source = gethtml.get(url)

# Import ElementTree, preferring the fast C implementation, then the pure
# Python stdlib module, then the standalone elementtree add-on.
try:
    try:
        # A stray bare `raise` here previously made this import unreachable,
        # always forcing the pure-Python fallback.
        import xml.etree.cElementTree as ElementTree
    except ImportError:
        from xml.etree import ElementTree
except ImportError:
    try:
        from elementtree import ElementTree
    except ImportError:
        dlg = xbmcgui.Dialog()
        dlg.ok('ElementTree missing', 'Please install the elementree addon.',
                'http://tinyurl.com/xmbc-elementtree')
        sys.exit(0)
Example #10
0
def get_html(url):
    """Fetch and return the page at url via the shared gethtml helper."""
    # An unreachable hockey.QUALITY(url, gamename) call after this return
    # (dead code, and `gamename` was not even in scope) was removed.
    return gethtml.get(url, __datapath__)
# NOTE(review): this elif chain continues a mode-dispatch `if` that is not
# visible in this chunk; only comments were added here.
elif mode == 2000:
    # Cache the directory listing unless it is for today's games.
    cache = not (today.year == year and today.month == month and today.day == day)
    QUICK_PLAY_VIDEO(url)
elif mode == 2001:
    cache = not (today.year == year and today.month == month and today.day == day)
    PLAY_VIDEO(url)


elif mode == 66:
    # Login smoke test: add a localized success/failure directory entry.
    cache = False
    if not hockeyUtil.login():
        print "failed"
        hockeyUtil.addDir(__settings__.getLocalizedString(40001), hockeystreams, 0, '', 5)
    else:
        hockeyUtil.addDir(__settings__.getLocalizedString(40000), hockeystreams, 0, '', 5)
elif mode == 99:
    cache = False
    if not hockeyUtil.login():
        hockeyUtil.addDir(__settings__.getLocalizedString(40001), hockeystreams, 0, '', 5)
    else:
        # Hit the site's exception-update endpoint while authenticated.
        exception_data = urllib.urlencode({'update': 'Update Exception'})
        exception_url = hockeystreams + "/include/exception.inc.php?" + exception_data
        try:
            read = gethtml.get(exception_url, cookiepath, __dbg__)
            hockeyUtil.addDir(__settings__.getLocalizedString(40000), hockeystreams, 0, '', 5)
        except:
            hockeyUtil.addDir(__settings__.getLocalizedString(40001), hockeystreams, 0, '', 5)

xbmcplugin.endOfDirectory(int(sys.argv[1]), cacheToDisc = cache)
# NOTE(review): this fragment continues a mode-dispatch chain whose opening
# `if` is not visible in this chunk; only comments were added here.
    BY_TEAM(archivestreams, 31)
elif mode == 31:
    ARCHIVE_GAMES_BY_TEAM(url, 1000)
elif mode == 1000:
    QUALITY(url, gamename)
    cache = False
elif mode == 2000:
    PLAY_VIDEO(url)
    # Only cache when the listing is not for today's date.
    cache = not (today.year == year and today.month == month and today.day == day)

elif mode == 66:
    # Login smoke test: add a success/failure directory entry.
    if not login():
        print "failed"
        addDir('failed!', hockeystreams, 0, '', 5)
    else:
        addDir('succeeded!', hockeystreams, 0, '', 5)
elif mode == 99:
    if not login():
        addDir('failed!', hockeystreams, 0, '', 5)
    else:
        # Hit the site's exception-update endpoint while authenticated.
        exception_data = urllib.urlencode({'update': 'Update Exception'})
        exception_url = hockeystreams + "/include/exception.inc.php?" + exception_data
        read = gethtml.get(exception_url, cookiepath)
        addDir('succeeded!', hockeystreams, 0, '', 5)

if mode == 69:
    #xbmcplugin.openSettings(sys.argv[0])
    pass
else:
    xbmcplugin.endOfDirectory(int(sys.argv[1]), cacheToDisc = cache)
Example #13
0
def get_html(url, cookie=None, user_agent=None):
    """Fetch url through gethtml, forwarding the optional cookie and UA."""
    return gethtml.get(
        url, __datapath__, cookie=cookie, user_agent=user_agent)
Example #14
0
def Browse_EpisodesDiff(url, page='', content='episodes', view='515'):
    """List episodes of a diff-anime.pl series as playable directory items.

    Fetches the series' '/odcinki' (episodes) page and the series page
    itself, scrapes episode names/links, artwork, fanart and the plot,
    resolves each episode page to its video file url, and adds one
    non-folder entry per episode.
    NOTE(review): `img` and `strona` are only assigned inside conditional
    loops -- if the artwork or file regex matches nothing, the later
    references would raise NameError.
    """
    html = gethtml.get(url + '/odcinki', addonPath)
    htmlplot = gethtml.get(url , addonPath)
    html = messupText(html, ciastko, True, True)
    # Each match is (episode number/name, relative episode url).
    s = "#(.+?)</div><div class=.+?</div><div class='con3'><a href='(.+?)' class='i'>"
    matches = re.compile(s).findall(html)
    ItemCount = len(matches)
    if ItemCount > 0:
        for  _nazwa, _url in matches:
            _url2 = 'http://diff-anime.pl' + _url
            _name = 'Odcinek' + _nazwa
            _title = '' + _name
#  artwork (cover image)
            # NOTE(review): this regex is recompiled and re-run on the same
            # html for every episode; the last match wins.
            image = re.compile("</div><div class='content'><div class='con'><a href='(.+?)' class='fbox'>").findall(html)
            ItemCount = len(image)
            if len(image) > 0:
                for foto in image:
                    img = "http://diff-anime.pl" + foto
            else:
                    img = ""
#  fanart (frame captures)
            if "Nie dodano kadrów do tej serii." in html:
                fanart = fanartSite
            else:
                image2 = re.compile("<h2>Kadry</h2></div><div class='content'><a href='(.+?)' class='fbox'>").findall(html)
                ItemCount = len(image)
                if len(image) > 0:
                    for _fanart in image2:
                        fanart = "http://diff-anime.pl" + _fanart
                else:
                        fanart = img
#  plot/description (three progressively looser patterns are tried)
            opis = re.compile("<h2>Opis anime</h2></div><div class='content'><div class='con'>(.+?)</div>").findall(htmlplot)
            ItemCount = len(opis)
            if len(opis) > 0:
                for desc in opis:
                    plot = unicode(desc,"utf-8")
            else:
                    opis = re.compile("<h2>Opis anime</h2></div><div class='content'><div class='con'>(.+?)<").findall(htmlplot)
                    ItemCount = len(opis)
                    if len(opis) > 0:
                        for desc in opis:
                            plot = unicode(desc,"utf-8")
                    else:
                            opis = re.compile("<div id='pDesc' class='panel'><div class='head'><h2>Opis anime</h2></div><div class='content'><div class='con'>(.+?)<br />").findall(htmlplot)
                            ItemCount = len(opis)
                            if len(opis) > 0:
                                for desc in opis:
                                    plot = unicode(desc,"utf-8")
                            else:
                                    plot = ""
            labs = {}
            try:
                labs['plot'] = plot
            except:
                labs['plot'] = ''
#  extract the mp4 link from the episode page's player config
            html2 = gethtml.get(_url2, addonPath)
            _link = re.compile("'file': '(.+?)',").findall(html2)
            ItemCount = len(_link)
            if len(_link) > 0:
                for link in _link:
                    strona = link.replace(' ', '%20')
###
            contextLabs = {'title': _name, 'year': '0000', 'url': _url2, 'img': img, 'fanart': fanart, 'DateAdded': '', 'plot': labs['plot']}
            contextMenuItems = ContextMenu_Episodes(labs=contextLabs)
            pars = {'mode': 'PlayFromHost', 'site': site, 'section': section, 'title': _name, 'url': strona, 'img': img, 'fanart': fanart}
            labs['title'] = _title
            _addon.add_directory(pars, labs, is_folder=False, fanart=fanart, img=img, contextmenu_items=contextMenuItems, total_items=ItemCount)
    set_view(content, int(addst('links-view')))
    eod()
Example #15
0
def get_html(url, cookie=None):
    """Fetch the page at url via gethtml, passing the optional cookie along."""
    response = gethtml.get(url, __datapath__, cookie=cookie)
    return response