def hdcastream(page_data,file): #site dinozap
    """Resolve an hdcastream.com channel id into an rtmpdump-style play URL.

    `page_data` is unused here; `file` is the channel id (and shadows the
    Python 2 builtin).  Relies on module-level helpers `Location_only` and
    `re_me`, plus `requests` and `base64` imported at module scope.
    """
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; rv:29.0) Gecko/20100101 Firefox/29.0'}
    url='http://www.hdcastream.com/channel.php?file=%s&width=700&height=400&autostart=true' %file
    # NOTE(review): Location_only() apparently resolves the URL/redirect target
    # that is fetched below -- confirm against its definition.
    req = Location_only(url,headers)
    #req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.3; rv:29.0) Gecko/20100101 Firefox/29.0')
    #req.add_header('Referer', 'http://ibrod.tv/cw-tv-oline.html')
    ress = requests.get(req, headers = headers,allow_redirects=False)
    src=ress.text
    #response.close()
    
    # First iframe src on the channel page points at the real player page.
    iframe = re_me(src,'src=\"(.*?)\"')
    print ('iframe is %s' %iframe)
    #get location from header to get domain
    r = requests.head(iframe, headers = headers,allow_redirects=False)
    try:
        # The redirect Location carries a domainprotect= value that must be
        # replayed as the Referer; if the header is absent, keep old headers.
        ref = r.headers['Location']
        print ('ref is %s' %ref)
        k = ref.split('domainprotect=')
        referer = k[1]
        headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; rv:29.0) Gecko/20100101 Firefox/29.0','Referer' : '%s' %referer}
        print referer
    except Exception:
        pass
    
    response= requests.get(iframe,headers=headers,allow_redirects=False)
    data=response.text
    # ssx4/ssx1 are base64-encoded hidden form fields: server URL and playpath.
    ssx4 = base64.b64decode(re_me(data,'ssx4" value=\"(.*?)\"'))
    print ( 'ssx4 is %s' %ssx4)
    ssx1 = base64.b64decode(re_me(data,'ssx1" value=\"(.*?)\"'))
    nssx4 = ssx4.replace("redirect/", "vod")
    print ( 'nssx4 is %s' %nssx4)
    playurl = nssx4 + ' playpath=' + ssx1 + ' pageUrl=' + iframe + ' token=wowk flashver=WIN%2011,9,900,117 swfUrl=http://www.thebestplayeronline.com/jwplayer5/addplayer/jwplayer.flash.swf'
    return playurl
# Esempio n. 2 (File: url.py, Progetto: roeiba/xbmc)
 def _urlcall(self, url, params, data, urltype):
     """Dispatch a GET/POST/DELETE via the module-level `_requests` session.

     Builds `loglines` describing the call.  NOTE(review): as visible here the
     method ends without returning `urldata`/`loglines` -- the snippet may be
     truncated; verify against the original file.
     """
     loglines = []
     urldata = ''
     try:
         if urltype == "get":
             urldata = _requests.get(url,
                                     params=params,
                                     timeout=self.timeout)
         elif urltype == "post":
             urldata = _requests.post(url,
                                      params=params,
                                      data=data,
                                      headers=self.headers,
                                      timeout=self.timeout)
         elif urltype == "delete":
             urldata = _requests.delete(url,
                                        params=params,
                                        data=data,
                                        headers=self.headers,
                                        timeout=self.timeout)
         loglines.append("the url is: " + urldata.url)
         loglines.append('the params are: ')
         loglines.append(params)
         loglines.append('the data are: ')
         loglines.append(data)
     # Python 2 except syntax; connection failures are recorded, not raised.
     except _requests.exceptions.ConnectionError, e:
         loglines.append('site unreachable at ' + url)
         loglines.append(e)
# Esempio n. 3
	def getDevicesList(self):
		"""Return the 'devices' list from the Pushbullet API, raising on error."""
		resp = requests.get(self.baseURL.format('devices'), auth=(self.token, ''))
		payload = resp.json()
		if 'error' in payload:
			LOG(payload['error'])
			raise PushbulletException(payload['error'])
		return payload.get('devices')
def ilive(page_data,domain=None):
    """Resolve an ilive.to channel page into an rtmpdump play string.

    Decodes the page's obfuscated `c`/`x` blobs (via module helpers `re_me`
    and `iillive`), extracts token URL/swf/streamer/playpath from the embedded
    script, fetches a session token and assembles the rtmp parameter string.
    Python 2 code (print statements, urllib2).  `domain` is unused here.
    """
    s = requests.Session()
    #headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36'}
    #headers = {'cache-control':'no-cache','User-Agent': 'Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36'}
    url= page_data #'http://www.ilive.to/view/%s/' %id
    #resp, content = h.request(url, "GET",headers=headers)
    #r = s.get(url, headers = headers,allow_redirects=False)
    #content = r.text    
    #headers.update({'Cookie': r.headers['set-cookie']})
    #soup = BeautifulSoup(content)
    # NOTE(review): `headers` is read before assignment here -- it must be a
    # module-level dict; the session cookie is copied into it for later calls.
    r = s.get(url, headers = headers,allow_redirects=False)
    content = r.text
    headers.update({'Cookie': r.headers['set-cookie']})
    c= re_me(content,r'''>c="(.*?)"''')
    
    # Every 3rd char of `c` is replaced by '%' to rebuild a URL-encoded string.
    d=''
    for i in range(len(c)):
        if i%3 == 0:
            d+=('%')
        else:
            d+=(c[i])
    
    uu= urllib2.unquote(d)
    print 'uu',uu
    # Token list is embedded as Array(...) inside the decoded blob.
    ttk= re_me(uu,r'''Array\((.*?)\)''')
    tk = ttk.split(',')
    print 'tk',len(tk)
    
        
    x= re_me(content,r'''x\("(.*?)"\)''')
    print len(x)
    tag = iillive(x,tk)
    soup = BeautifulSoup(tag)
    print len(soup)
    l = soup('script', {'type':'text/javascript'})
    print len(l)
    for realdata in l:
        if realdata.find(text=re.compile('token')):
    #print 'found'
            match = re.compile(ilivee_regex,re.DOTALL).findall(realdata.text)    
    #match = re.compile(ilivee, re.DOTALL).findall(tag)
    # NOTE(review): this print sits outside the loop above; `match` is only
    # bound if some script containing 'token' was found -- otherwise NameError.
    print match
            
    #    match = re.compile(r'getJSON\(\"(.*?)\",.*?streamer\': \"(.*?)direct2watch(.*?)\",.*?\'file\': \'(.*?)\'.*?\'flash\', src:\s*\'(.*?)\'', re.DOTALL).findall(data)
    for token_url,swf,rtmp,playpath in match:
        headers.update({'Referer': '%s'%url}) #you must have this
        #r = s.get(token_url)
        #R_token = r.text
        # The scraped token_url is discarded and rebuilt with epoch timestamps.
        token_url = 'http://www.ilive.to/server2.php?id='+getEpocTime()+'&_='+getEpocTime(milli='1')
        print token_url
        #headers.update({'Referer': '%s'%url,'cache-control':'no-cache'})
        r = requests.get(token_url,headers=headers)
        token = re_me(r.text,'token":"(.*?)"')
        playpath = playpath.split('.')
        
        rtmp = rtmp.replace("\\", "")  #.replace('rtmp','rtsp')
        # app is whatever follows the ":1935/" port in the rtmp URL.
        app = rtmp.split("1935/")
        #print app[1]
        play= rtmp + ' app='+ app[1] +' playpath=' + playpath[0] + ' token=' + token + ' pageUrl=' +url + ' live=1 swfVfy=true timeout=30 swfUrl=' + swf
        return play
# Esempio n. 5
def GetChannels():
    """Fetch chan.xml from the Dixie server and write it under datapath."""
    dest = os.path.join(datapath, 'chan.xml')
    resp = requests.get(dixie.GetDixieUrl(DIXIEURL) + 'chan.xml',
                        auth=(username, password))

    with open(dest, 'wb') as out:
        for piece in resp.iter_content(512):
            out.write(piece)
# Esempio n. 6
def download(url, dest, dp = None, start = 0, range = 100):
    """Download `url` to `dest` using HTTP basic auth.

    Fix: pass stream=True so the response body is streamed in 1024-byte
    chunks instead of being buffered entirely in memory before the
    iter_content() loop.  `dp`, `start` and `range` are accepted only for
    interface compatibility and are unused (`range` shadows the builtin).
    Returns None.
    """
    r = requests.get(url, auth=(username, password), stream=True)

    with open(dest, 'wb') as f:
        for chunk in r.iter_content(1024):
            f.write(chunk)
# Esempio n. 7
def getFiles(url):
    """Return the body of update.txt from the Dixie server.

    The `url` argument is ignored; the target is rebuilt from DIXIEURL.
    """
    target = dixie.GetDixieUrl(DIXIEURL) + 'update.txt'
    reply = requests.get(target,
                         allow_redirects=False,
                         auth=(username, password))
    return reply.text
def download(url, dest, dp=None, start=0, range=100):
    """Download `url` to `dest` using HTTP basic auth.

    Fix: pass stream=True so the response body is streamed in 512-byte
    chunks instead of being buffered entirely in memory before the
    iter_content() loop.  `dp`, `start` and `range` are accepted only for
    interface compatibility and are unused (`range` shadows the builtin).
    Returns None.
    """
    r = requests.get(url, auth=(username, password), stream=True)

    with open(dest, 'wb') as f:
        for chunk in r.iter_content(512):
            f.write(chunk)
# Esempio n. 9
 def getDevicesList(self):
     """Return the 'devices' list from the Pushbullet API, raising on error."""
     resp = requests.get(self.baseURL.format('devices'),
                         auth=(self.token, ''))
     payload = resp.json()
     if 'error' in payload:
         LOG(payload['error'])
         raise PushbulletException(payload['error'])
     return payload.get('devices')
# Esempio n. 10
def GetChannels():
    """Fetch chan.xml from the Dixie server and write it under datapath."""
    target = dixie.GetDixieUrl(DIXIEURL) + 'chan.xml'
    destination = os.path.join(datapath, 'chan.xml')
    reply = requests.get(target, auth=(username, password))

    with open(destination, 'wb') as handle:
        for block in reply.iter_content(512):
            handle.write(block)
    def _connect(self):
        """(Re)open the SSE stream, resuming from the last event id seen."""
        if self.last_id:
            # Ask the server to replay events after the one we already have.
            self.requests_kwargs['headers']['Last-Event-ID'] = self.last_id
        self.resp = requests.get(self.url, stream=True,
                                 **self.requests_kwargs)

        # TODO: Ensure we're handling redirects.  Might also stick the 'origin'
        # attribute on Events like the Javascript spec requires.
        self.resp.raise_for_status()
# Esempio n. 12
def Network():
    """Scrape iplocation.net and show the user's public IP and country once."""
    page = requests.get('http://www.iplocation.net/').content
    rows = re.compile("<td width='80'>(.+?)</td><td>(.+?)</td><td>(.+?)</td><td>.+?</td><td>(.+?)</td>").findall(page)

    shown = False
    for ip, region, country, isp in rows:
        # Only the first scraped row is reported to the user.
        if not shown:
            utils.dialogOK('Your Public IP Address is: ' + ip,
                           'Your IP Address is based in: ' + country)
            shown = True
# Esempio n. 13
    def download(self):
        """Download the selected build image (streamed) and decompress it.

        Uses the bundled requests2 module.  Exits the addon process on
        cancellation (exit 0) or on download/write/decompress errors (exit 1).
        """
        import requests2 as requests

        tar_name = self.selected_build.tar_name
        filename = self.selected_build.filename

        utils.log("Download URL = " + self.selected_build.url)
        try:
            resp = requests.get(self.selected_build.url, stream=True)
            utils.log("Opened URL " + self.selected_build.url)
            bz2_size = int(resp.headers['Content-Length'])
            utils.log("Size of file = " + utils.size_fmt(bz2_size))

            if (os.path.isfile(filename) and
                os.path.getsize(filename) == bz2_size):
                # Skip the download if the file exists with the correct size.
                utils.log("Skipping download")
                pass
            else:
                # Do the download
                utils.log("Starting download of " + self.selected_build.url)
                with progress.FileProgress("Downloading",
                                           resp.raw, filename, bz2_size,
                                           self.background) as downloader:
                    downloader.start()
                utils.log("Completed download of " + self.selected_build.url)  
        except script_exceptions.Canceled:
            sys.exit(0)
        except requests.RequestException as e:
            utils.url_error(self.selected_build.url, str(e))
            sys.exit(1)
        except script_exceptions.WriteError as e:
            utils.write_error(os.path.join(__dir__, filename), str(e))
            sys.exit(1)

        # Do the decompression if necessary.
        if self.selected_build.compressed and not os.path.isfile(tar_name):
            try:
                # NOTE(review): bz2_size is the *compressed* size, used only to
                # drive the progress bar; bf is never explicitly closed here.
                bf = open(filename, 'rb')
                utils.log("Starting decompression of " + filename)
                with progress.DecompressProgress("Decompressing",
                                                 bf, tar_name, bz2_size,
                                                 self.background) as decompressor:
                    decompressor.start()
                utils.log("Completed decompression of " + filename)
            except script_exceptions.Canceled:
                sys.exit(0)
            except script_exceptions.WriteError as e:
                utils.write_error(os.path.join(__dir__, tar_name), str(e))
                sys.exit(1)
            except script_exceptions.DecompressError as e:
                utils.decompress_error(os.path.join(__dir__, filename), str(e))
                sys.exit(1)
# Esempio n. 14
    def _downloadUrl(self, url):
        """GET `url` with basic auth; return the raw body on HTTP 200, else None."""
        resp = requests.get(url, auth=(username, password))

        if resp.status_code == 200:
            # NOTE(review): .content is raw bytes; setting encoding only
            # affects .text, which is never used here.
            resp.encoding = 'UTF-8'
            return resp.content

        return
# Esempio n. 15
	def pushes(self,modified_after=0):
		params = {'modified_after':modified_after and '{0:10f}'.format(modified_after) or '0'}
		req = requests.get(self.baseURL.format('pushes'),auth=(self.token,''),params=params)
		try:
			data = req.json()
		except:
			if DEBUG:
				print repr(req.text)
			else:
				LOG('JSON decode error')
			
		return data.get('pushes')
# Esempio n. 16
def checkFiles(url):
    """Check update.txt on the Dixie server; print and return the status code.

    The `url` parameter is ignored and rebuilt from DIXIEURL.  `response` and
    `reason` are fetched but unused.
    """
    url      = dixie.GetDixieUrl(DIXIEURL) + 'update.txt'
    request  = requests.get(url, allow_redirects=False, auth=(username, password))
    response = request.text
    code     = request.status_code
    reason   = request.reason
    
    print '----- Check OnTapp.TV Files -----'
    print '---------- status code ----------'
    print code

    return code
# Esempio n. 17
def checkFiles(url):
    """Check update.txt on the Dixie server; print and return the status code.

    The `url` parameter is ignored and rebuilt from DIXIEURL.  `response` and
    `reason` are fetched but unused.
    """
    url = dixie.GetDixieUrl(DIXIEURL) + 'update.txt'
    request = requests.get(url,
                           allow_redirects=False,
                           auth=(username, password))
    response = request.text
    code = request.status_code
    reason = request.reason

    print '----- Check OnTapp.TV Files -----'
    print '---------- status code ----------'
    print code

    return code
# Esempio n. 18
def getResponse():
    """Fetch update.txt from the Dixie server and return it parsed as JSON.

    On a non-200 status the HTML-stripped body is returned as
    {'Error': text}.  Fix: when the request itself raised, `response` was
    unbound and the final json.loads crashed with NameError; an empty dict
    is now returned in that case.  The bare except was narrowed to Exception.
    """
    response = None
    try:
        url      = dixie.GetDixieUrl(DIXIEURL) + 'update.txt'
        request  = requests.get(url, allow_redirects=False, auth=(username, password))
        code     = request.status_code
        response = request.content

        if not code == 200:
            # Strip HTML tags from the error body before reporting it.
            response = re.sub('<(.+?)>', '', response)
            return {'Error' : response}

    except Exception:
        pass

    if response is None:
        return {}
    return json.loads(u"" + (response))
# Esempio n. 19
def getResponse():
    """Fetch update.txt from the Dixie server and return it parsed as JSON.

    On a non-200 status the HTML-stripped body is returned as
    {'Error': text}.  Fix: when the request itself raised, `response` was
    unbound and the final json.loads crashed with NameError; an empty dict
    is now returned in that case.  The bare except was narrowed to Exception.
    """
    response = None
    try:
        url = dixie.GetDixieUrl(DIXIEURL) + 'update.txt'
        request = requests.get(url,
                               allow_redirects=False,
                               auth=(username, password))
        code = request.status_code
        response = request.content

        if not code == 200:
            # Strip HTML tags from the error body before reporting it.
            response = re.sub('<(.+?)>', '', response)
            return {'Error': response}

    except Exception:
        pass

    if response is None:
        return {}
    return json.loads(u"" + (response))
# Esempio n. 20
    def pushes(self, modified_after=0):
        params = {
            'modified_after':
            modified_after and '{0:10f}'.format(modified_after) or '0'
        }
        req = requests.get(self.baseURL.format('pushes'),
                           auth=(self.token, ''),
                           params=params)
        try:
            data = req.json()
        except:
            if DEBUG:
                print repr(req.text)
            else:
                LOG('JSON decode error')

        return data.get('pushes')
# Esempio n. 21
 def _urlcall( self, url, params, data, urltype ):
     """Dispatch a GET/POST/DELETE via the module-level `_requests` session.

     NOTE(review): as visible here the method builds `loglines` but ends
     without returning anything -- the snippet may be truncated; verify
     against the original file.
     """
     loglines = []        
     urldata = ''
     try:
         if urltype == "get":
             urldata = _requests.get( url, params=params, timeout=self.timeout )
         elif urltype == "post":
             urldata = _requests.post( url, params=params, data=data, headers=self.headers, timeout=self.timeout )
         elif urltype == "delete":
             urldata = _requests.delete( url, params=params, data=data, headers=self.headers, timeout=self.timeout )
         loglines.append( "the url is: " + urldata.url )
         loglines.append( 'the params are: ')
         loglines.append( params )
         loglines.append( 'the data are: ')
         loglines.append( data )
     # Python 2 except syntax; connection failures are recorded, not raised.
     except _requests.exceptions.ConnectionError, e:
         loglines.append( 'site unreachable at ' + url )
         loglines.append( e )
# Esempio n. 22
    def get_links(self, arch, timeout=None):
        """Yield build links for `arch` scraped from self.url.

        Compiles self.BUILD_RE with the arch substituted, fetches the page,
        and yields whatever self._create_link() makes of each matching anchor.
        Raises BuildURLError when the GET returns a falsy (error) response.
        """
        self.build_re = re.compile(self.BUILD_RE.format(arch))

        self._response = requests.get(self.url, timeout=timeout)
        if not self._response:
            raise BuildURLError("Build URL error: status {}".format(self._response.status_code))

        html = self._response.text
        args = ['a']
        if self.CLASS is not None:
            args.append(self.CLASS)
            
        # Parse only anchor tags whose href matches the build pattern.
        soup = BeautifulSoup(html, 'html.parser',
                             parse_only=SoupStrainer(*args, href=self.build_re))
                        
        self._links = soup.contents

        for link in self._links:
            # _create_link may return a falsy value for anchors to skip.
            l = self._create_link(link)
            if l:
                yield l
# Esempio n. 23
    def send_command(self, cmd, params=None, data=False):
        """Issue a camera HTTP command.

        Returns False on request failure or empty response, the raw body when
        `data` is truthy, otherwise a CameraXMLResponse (logging its message
        when it reports failure).
        """
        target = self._url_fmt.format(cmd)
        if params is not None:
            target += "&" + urllib.urlencode(params)

        utils.log_verbose(target)
        try:
            response = requests.get(target)
        except (requests.RequestException) as e:
            utils.log_error(str(e))
            return False

        if not response:
            return False
        if data:
            return response.content

        utils.log_verbose(response)
        xml_resp = CameraXMLResponse(response)
        utils.log_verbose(xml_resp)
        if not xml_resp:
            utils.log_error(xml_resp.message)
        return xml_resp
# Esempio n. 24
# __author__ = 'zhengmj'
from PIL import Image
from StringIO import StringIO
import requests2
# Fetch a PNG over HTTP and display it with PIL.  Python 2 idiom: StringIO
# wraps the raw response bytes so Image.open can read them like a file.
r = requests2.get('http://upload.server110.com/image/20130918/222JK917-0.png')
i = Image.open(StringIO(r.content))
i.show()
# Esempio n. 25
 def maybe_get_tags(cls):
     """Lazily fetch and cache the OpenELEC releases page as tag soup."""
     if cls.tag_soup is not None:
         return
     page = requests.get("http://github.com/OpenELEC/OpenELEC.tv/releases").text
     cls.tag_soup = BeautifulSoup(page, 'html.parser',
                                  parse_only=SoupStrainer(cls.tag_match))
# Esempio n. 26
def get_soup(param, value):
    """Return a BeautifulSoup of the /tales/ listing filtered by param=value."""
    page = requests.get(BASE_URL + "/tales/?{}={}".format(param, value)).text
    return BeautifulSoup(page)
# Esempio n. 27
def get_soup(param, value):
    """Return a BeautifulSoup of the /tales/ listing filtered by param=value."""
    listing_url = "{}/tales/?{}={}".format(BASE_URL, param, value)
    response = requests.get(listing_url)
    return BeautifulSoup(response.text)
# Esempio n. 28
def getFiles(url):
    """Return the body of update.txt from the Dixie server.

    The `url` argument is ignored; the target is rebuilt from DIXIEURL.
    """
    reply = requests.get(dixie.GetDixieUrl(DIXIEURL) + 'update.txt',
                         allow_redirects=False, auth=(username, password))
    return reply.text