Example #1
    def login(self):
        response = fetch("http://m.facebook.com/")

        m = re.search('''name="post_form_id" value="([^"]+?)"''', response.body)
        self.post_form_id = m.group(1)

        response = fetch(
            "https://www.facebook.com/login.php?m=m&refsrc=http%3A%2F%2Fm.facebook.com%2F&refid=0",
            data={
                "lsd": "off",
                "charset_test": "€,´,€,´,水,Д,Є",
                "version": "1",
                "ajax": "1",
                "width": "1280",
                "pxr": "1",
                "email": self.username,
                "pass": self.password,
                "submit": "Log In",
                "post_form_id": self.post_form_id,
            },
            headers={"Referer": "http://m.facebook.com/"},
        )

        set_cookie = response.getheader("Set-Cookie")
        self.cookies = sc2cs(set_cookie)

        url = response.getheader("location")
        response = fetch(url, headers={"Referer": "http://m.facebook.com/", "Cookie": self.cookies})

        self.post_form_id = re.search('''name="post_form_id" value="([^"]+?)"''', response.body).group(1)
        self.fb_dtsg = re.search('''name="fb_dtsg" value="([^"]+?)"''', response.body).group(1)

        return response
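
Examples #1, #4, #47 and #48 pass the Set-Cookie response header through a helper called sc2cs (Example #8 calls it as urlfetch.sc2cs), but its body never appears in this listing. The sketch below is only a plausible reconstruction, assuming the helper simply keeps the name=value pairs and drops attributes such as path, domain and expires; it is not necessarily the library's actual implementation.

# Plausible sc2cs() sketch ("Set-Cookie header to Cookie string"); an
# assumption, not the library's real code.
from Cookie import SimpleCookie  # http.cookies.SimpleCookie on Python 3

def sc2cs(set_cookie_header):
    cookie = SimpleCookie()
    cookie.load(set_cookie_header or '')
    return '; '.join('%s=%s' % (name, morsel.value)
                     for name, morsel in cookie.items())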
Example #2
 def get(self, platform, soldier):
     ua = 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13'
     referer = 'http://bfbcs.com/' + platform
     cache_tag = 'bfbcs::' + platform + '/' + soldier
     raw = memcache.get(cache_tag)
     url = 'http://bfbcs.com/stats_' + platform + '/' + soldier
     if raw is None:
         response = urlfetch.fetch(url, headers={'User-Agent' : ua, 'Referer' : referer })
         raw = response.content
         memcache.set(cache_tag, raw, 600)
     pcode = re.findall('([a-z0-9]{32})', raw)
     self.response.out.write('<strong>PCODE</strong> ' + str(pcode[0]) + '<br />')
     if len(pcode) == 1:
         pcode = pcode[0]
         payload = 'request=addplayerqueue&pcode=' + pcode
         self.response.out.write('<strong>PAYLOAD</strong> ' + payload + ' (' + str(len(payload))+ ' bytes)<br />')
         headers = {
             'User-Agent': ua,
             'Referer': url,
             'X-Requested-With': 'XMLHttpRequest',
             'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
             'Content-Length': str(len(payload)),
             'Accept': 'application/json, text/javascript, */*',
             'Accept-Language': 'en-us,en;q=0.5',
             'Accept-Encoding': 'gzip,deflate',
             'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
             'Keep-Alive': '115',
             'Host': 'bfbcs.com',
             'Pragma': 'no-cache',
             'Cache-Control': 'no-cache',
             'Cookie': '__utma=7878317.1843709575.1297205447.1298572822.1298577848.12; __utmz=7878317.1297205447.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); sessid=enqd028n30d2tr4lv4ned04qi0; __utmb=7878317.21.10.1298577848; __utmc=7878317'
         }
         response = urlfetch.fetch(url, payload=payload, headers=headers, method='POST')
         if response.status_code == 500:
             response = urlfetch.fetch(url, payload=payload, headers=headers, method='POST')
             if response.status_code == 500:
                 self.write('<strong>FAILED</strong>')
             else:
                 self.write('<strong>RESULT</strong> OK ' + response.content)
         else:
             self.write('<strong>RESULT</strong> OK ' + response.content)
Example #3
def pub2fanfou(username, password, status):
    # Get the form token
    response = urlfetch.fetch("http://m.fanfou.com/")
    token = re.search('''name="token".*?value="(.*?)"''', response.body).group(1)

    # Log in
    response = urlfetch.fetch(
        "http://m.fanfou.com/",
        data={"loginname": username, "loginpass": password, "action": "login", "token": token, "auto_login": "******"},
        headers={"Referer": "http://m.fanfou.com/"},
    )

    # cookies
    cookiestring = response.cookiestring
    print cookiestring

    # Get the form token
    response = urlfetch.fetch(
        "http://m.fanfou.com/home", headers={"Cookie": cookiestring, "Referer": "http://m.fanfou.com/home"}
    )
    token = re.search('''name="token".*?value="(.*?)"''', response.body).group(1)

    # Post the status
    response = urlfetch.fetch(
        "http://m.fanfou.com/",
        data={"content": status, "token": token, "action": "msg.post"},
        headers={"Cookie": cookiestring, "Referer": "http://m.fanfou.com/home"},
    )
Example #4
    def login(self):
        response = fetch(
            "http://m.douban.com/"
        )

        set_cookie = response.getheader('Set-Cookie')
        self.cookies = sc2cs(set_cookie)

        self.session = response.getheader('location').split('=', 1)[-1]

        response = fetch(
            "http://m.douban.com/",
            data = {
                'form_email': self.username,
                'form_password': self.password,
                'redir': '',
                'user_login': '******',
                'session': self.session
            },
            headers = {
                'Referer': 'http://m.douban.com/',
                'Cookie': self.cookies,
            }
        )
        set_cookie = response.getheader('Set-Cookie')
        self.cookies += "; " + sc2cs(set_cookie)

        return response
Example #5
def resolve_url(url):
  headers=HTTP_DESKTOP_UA
  cookie = Cookie.SimpleCookie()
  
  form_fields = {
   "inputUserName": __settings__.getSetting('username'),
   "inputPassword": __settings__.getSetting('password')
  }

  form_data = urllib.urlencode(form_fields)

  response = urlfetch.fetch(
    url='http://4share.vn/?control=login',
    method='POST',
    headers=headers,
    data=form_data,
    follow_redirects=False)

  cookie.load(response.headers.get('set-cookie', ''))
  headers['Cookie'] = _makeCookieHeader(cookie)
  
  response = urlfetch.fetch(url,headers=headers, follow_redirects=True)

  soup = BeautifulSoup(response.content, convertEntities=BeautifulSoup.HTML_ENTITIES)
  for item in soup.findAll('a'):
    if item['href'].find('uf=')>0:
      url=item['href']

  if url.find('uf=')<0:
    xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % ('Authentication', 'Please check your 4share username/password', '15')).encode("utf-8"))   
    return
	  
  item = xbmcgui.ListItem(path=url)
  xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
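
This example and the FShare/4share logins further down build their Cookie request header with a helper named _makeCookieHeader that is never shown here. Assuming it plays the same role as the sc2cs sketch above, a minimal version could be:

# Hypothetical _makeCookieHeader(); assumed to serialize a Cookie.SimpleCookie
# into a single "name=value; name=value" request header.
def _makeCookieHeader(cookie):
    return '; '.join('%s=%s' % (name, morsel.value)
                     for name, morsel in cookie.items())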
Example #6
def fetch(scratchdir, dataset, url):
    """
    Caching URL fetcher, returns filename of cached file.
    """
    filename = os.path.join(scratchdir, os.path.basename(url))
    print 'Fetching dataset %r from %s' % (dataset, url)
    urlfetch.fetch(url, filename)
    return filename
Example #7
def fetch(scratchdir, dataset, url):
    """
    Caching URL fetcher, returns filename of cached file.
    """
    filename = os.path.join(scratchdir, os.path.basename(url))
    print 'Fetching dataset %r from %s' % (dataset, url)
    urlfetch.fetch(url, filename)
    return filename
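
The two fetch() helpers above are documented as caching fetchers but always re-download the file. A minimal sketch of the cache check the docstring implies, keeping the urlfetch.fetch(url, filename) call signature used above, might look like this (fetch_cached is a hypothetical name):

import os
import urlfetch  # assumed to be the same module used by the snippets above

def fetch_cached(scratchdir, dataset, url):
    # Only download when the file is not already in the scratch directory.
    filename = os.path.join(scratchdir, os.path.basename(url))
    if not os.path.exists(filename):
        print 'Fetching dataset %r from %s' % (dataset, url)
        urlfetch.fetch(url, filename)
    return filename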
Example #8
def pub2fanfou(username, password, status):
    # Get the form token
    response = urlfetch.fetch(
        "http://m.fanfou.com/"
    )
    token = re.search('''name="token".*?value="(.*?)"''', response.body).group(1)
    
    # Log in
    response = urlfetch.fetch(
        "http://m.fanfou.com/",
        data = {
            'loginname': username,
            'loginpass': password,
            'action': 'login',
            'token': token,
            'auto_login': '******',
        },
        headers = {
            "Referer": "http://m.fanfou.com/",
        }
    )
    
    #cookies
    cookies = urlfetch.sc2cs(response.getheader('Set-Cookie'))
    print cookies
    
    # Get the form token
    response = urlfetch.fetch(
        "http://m.fanfou.com/home",
        headers = {
            'Cookie': cookies,
            'Referer': "http://m.fanfou.com/home",
        }
    )
    token = re.search('''name="token".*?value="(.*?)"''', response.body).group(1)
    
    # Post the status
    response = urlfetch.fetch(
        "http://m.fanfou.com/",
        data = {
            'content': status,
            'token': token,
            'action': 'msg.post',
        },
        headers = {
            'Cookie': cookies,
            'Referer': "http://m.fanfou.com/home",
        }
    )
Example #9
def queryRdfStore(endPoint, query):

    try:
        url = endPoint + urllib.urlencode({"query": query})

    except UnicodeEncodeError:
        return ""

    jsonresult = urlfetch.fetch(
        url,
        deadline=30,
        method=urlfetch.GET,
        headers={"accept": "application/sparql-results+json"})

    if (jsonresult.status_code == 200):
        res = json.loads(jsonresult.content)

        res_var = res['head']['vars']

        response = []
        for row in res['results']['bindings']:
            dic = {}
            for var in res_var:
                dic[var] = row[var]['value']

            response.append(dic)

        return response
    else:
        return {"error": jsonresult.content}
Example #10
def getLocation(city):

    api_key = "wV3l1uxVevrxnA6e"

    city = city.replace(" ", "%20")

    url = "http://api.songkick.com/api/3.0/search/locations.xml?query=" + city + "&apikey=" + api_key

    locationXML = urlfetch.fetch(url, deadline=60, method=urlfetch.GET)

    if locationXML.status_code == 200:
        tree = etree.fromstring(locationXML.content)

        metroArea = tree.find('results/location/metroArea')
        locationId = metroArea.attrib['id']
        locationLat = metroArea.attrib['lat']
        locationLong = metroArea.attrib['lng']

        # Return the id, latitude and longitude as a plain list
        location = [locationId, locationLat, locationLong]

        return location

    else:
        # Need better error handling
        print "Location does not exist or something else went wrong with the connection to the Songkick server."
        sys.exit()
Example #11
 def get(self):
     site = GetSite()
     browser = detect(self.request)
     member = CheckAuth(self)
     l10n = GetMessages(self, member, site)
     self.session = Session()
     if member:
         source = 'http://daydream/stream/' + str(member.num)
         result = urlfetch.fetch(source)
         images = json.loads(result.content)
         template_values = {}
         template_values['images'] = images
         template_values['site'] = site
         template_values['member'] = member
         template_values['page_title'] = site.title + u' › 图片上传'
         template_values['l10n'] = l10n
         template_values['system_version'] = SYSTEM_VERSION
         if 'message' in self.session:
             template_values['message'] = self.session['message']
             del self.session['message']
         path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop')
         t =self.get_template(path, 'images_home.html')
         self.finish(t.render(template_values))
     else:
         self.redirect('/signin')
Example #12
def getUserArtists(username, nrOfArtists):

    api_key = "7e6d32dffbf677fc4f766168fd5dc30e"
    #secret = "5856a08bb3d5154359f22daa1a1c732b"

    url = "http://ws.audioscrobbler.com/2.0/?method=user.gettopartists&user="******"&limit=" + str(nrOfArtists) + "&api_key=" + api_key

    artistsXML = urlfetch.fetch(url,deadline=60,method=urlfetch.GET)

    artists = []

    if artistsXML.status_code == 200:

        tree = etree.fromstring(artistsXML.content)

        for artist in tree.findall('topartists/artist'):
            rank = artist.attrib['rank']
            name = artist.find('name')
            artists.insert(int(rank), name.text)

    else:
        # Need better error handling
        print "Last FM User does not exist or something else went wrong with the connection to the Last FM server."
        sys.exit()

    return artists
Example #13
def getLocInfo(lat, long):

    results = []
    index = 0

    api_key = "AIzaSyDva2nYRJnjiQ-BW-I67_5m7GxA_19gA7Y"

    url = "https://maps.googleapis.com/maps/api/place/search/xml?location=" + lat + "," + long + "&radius=10000&types=amusement_park|museum|shopping_mall|zoo|point_of_interest&sensor=false&key=" + api_key

    poiXML = urlfetch.fetch(url, deadline=60, method=urlfetch.GET)

    if poiXML.status_code == 200:

        tree = etree.fromstring(poiXML.content)

        for poi in tree.findall('result'):
            poiName = poi.find('name').text
            results.insert(index, poiName)
            index += 1

        return results

    else:
        print "Something went wrong with the connection to the Google Places server"
        sys.exit()
Example #14
def search(url):  #1
    try:
        keyb = xbmc.Keyboard(
            '', '[COLOR yellow]Nhâp tên phim cần tìm kiếm[/COLOR]')
        keyb.doModal()
        if (keyb.isConfirmed()):
            searchText = urllib.quote_plus(keyb.getText())
        if 'vaphim' in url:
            response = urlfetch.fetch('http://vaphim.com/?s=' +
                                      urllib.quote_plus(searchText))
            items = re.compile(
                "<a data=.+?<img.+?src=\"(.+?)\".+?<a href=\"(http://vaphim.com/.+?)\".+?>(.+?)</a>",
                re.DOTALL).findall(response.content)
            for item in items:
                name = re.sub(r'<.+?>', r'-', item[2])
                addDir(name, item[1], 5, item[0], isFolder=True)
        elif 'mphim' in url:
            url = 'http://mphim.net/tim-kiem/%s/trang-1.html' % (
                searchText.replace(' ', '-').encode("utf-8"))
            medialist(url, page)
        elif 'fphim' in url:
            url = 'http://fsharefilm.com/?s=%s' % (searchText.encode("utf-8"))
            search_result(url, page)
    except:
        pass
Example #15
def bestchallenge_comics():
    logger = logging.getLogger(__name__ + '.bestchallenge_comics')
    url_format = BESTCHALLENGE_LIST_URL + '?page={0}'
    last_url = url_format.format(999999)
    with urlfetch.fetch(last_url, cache, 120) as f:
        html = lxml.html.parse(f)
    logger.info(last_url)
    last = html.xpath('//*[@id="content"]//*[contains(concat(" ", @class,'
                      '" "), " paginate ")]//strong[@class="page"]/em/text()')
    last_html = html
    last = int(last[0])

    def get_html(page):
        if page == last:
            return last_html
        url = url_format.format(page)
        with urlfetch.fetch(url, cache, 120) as f:
            logger.info(url)
            html = lxml.html.parse(f)
        return html

    pool = Pool(POOL_SIZE)
    htmls = pool.map(get_html, xrange(1, last + 1))
    for html in htmls:
        links = html.xpath('//*[@id="content"]//table[@class="challengeList"]'
                           '//td/*[@class="fl"]/a')
        for a in links:
            href = a.attrib['href']
            query = href[href.index('?') + 1:]
            title_id = int(werkzeug.urls.url_decode(query)['titleId'])
            yield title_id, a.xpath('./img/@title')[0]
Example #16
def main():
    channel = []
    result = urlfetch.fetch(__homeUrl__,headers=reg)
    soup = BeautifulSoup(result.content, convertEntities=BeautifulSoup.HTML_ENTITIES)
    items = soup.findAll('div', {'class' : 'item_view'})
    for item in items:
            
        common = item.find('a', {'class' : 'tv_channel '})
        if common == None :
          common = item.find('a', {'class' : 'tv_channel active'})

        lock = item.find('img', {'class' : 'lock'})
        if lock == None :

          title = common.get('title')       
          url = common.get('data-href')
          thumb = common.find('img',{'class':'img-responsive'}).get('data-original')
          thumb = thumb.split('?')
          if 'giai-tri-tv' in url or 'phim-viet' in url or 'the-thao-tv-hd' in url or 'kenh-17' in url or 'e-channel' in url or 'hay-tv' in url or 'ddramas' in url or 'bibi' in url or 'o2-tv' in url or 'info-tv' in url or 'style-tv' in url or 'invest-tv' in url or 'yeah1' in url:
            pass
          else :		  
            data = {
                'label': title.replace('-',' '),
                'path': xbmcplugin.url_for('plays', id = url.replace('http://fptplay.net/livetv/','')),
                'thumbnail':thumb[0],
                'is_playable': True
                }
            channel.append(data)

    xbmc.executebuiltin('Container.SetViewMode(%d)' % 500)
    return xbmcplugin.finish(channel)
Example #17
    def get_posts(self):
        
        dd = datetime.fromordinal(date.today().toordinal()) - timedelta(days = 31)
        dd = dd.isoformat("T") + "Z"
        str_yesterday = str(dd)
        print str_yesterday,"sst"
        result = graph.search(term="vodafone", type='post', page=False,
                              retry=2, since=str_yesterday)
        while len(result['data']) != 0:
            for ele in result['data']:
                post = {"kind": "Facebook"}
                post.update({"object": ele})
                post_id = posts.insert(post)
                #post['object']['message']
                #print json.dumps(post['object'], sort_keys=True, indent=4),"eleeeeeeee"
                
            #print posts.find_one({"type":"video"}),"edeee"
            
            if 'paging' in result:
                resultt=json.loads(urlfetch.fetch(result['paging']['next']).content)
                if result==resultt:
                    result['data']=""
                    print "iiiii"
                else:
                    try:
                        print "uuuuuu"
                        result = resultt
                    except Exception:
                        pass
            else:
                print "pppppppp"
                result['data']=""
Example #18
def login():
  #if cache.get('cookie') is not None and cache.get('cookie')<>'':
  #  print 'Cache ' + cache.get('cookie')
  #  return True

  cookie = Cookie.SimpleCookie()
  
  form_fields = {
   "inputUserName": __settings__.getSetting('username'),
   "inputPassword": __settings__.getSetting('password')
  }

  form_data = urllib.urlencode(form_fields)

  response = urlfetch.fetch(
    url='http://4share.vn/?control=login',
    method='POST',
    headers=headers,
    data=form_data,
    follow_redirects=False)

  cookie.load(response.headers.get('set-cookie', ''))
  headers['Cookie'] = _makeCookieHeader(cookie)

  cache.set('cookie',headers['Cookie'])

  if response.status_code != 302:
    xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % ('Login', 'Login failed. You must input correct 4share username/pass in Add-on settings', '15')).encode("utf-8"))   
    return False
  else:
    xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % ('Login', 'Login successful', '15')).encode("utf-8"))   
    return True
Example #19
def doLogin():

	cookie = Cookie.SimpleCookie()
	
	form_fields = {
		"login_useremail": __settings__.getSetting('username'),
		"login_password": __settings__.getSetting('password'),
		"url_refe": "https://www.fshare.vn/index.php"
	}

	form_data = urllib.urlencode(form_fields)
	
	response = urlfetch.fetch(
		url = 'https://www.fshare.vn/login.php',
		method='POST',
		headers = headers,
		data=form_data,
		follow_redirects = False
	)

	cookie.load(response.headers.get('set-cookie', ''))
	headers['Cookie'] = _makeCookieHeader(cookie)
	
	if headers['Cookie'].find('-1')>0:
		xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % ('Login', 'Login failed. You must input correct FShare username/pass in Add-on settings', '15')).encode("utf-8"))	 
		return False
	else:
		return headers['Cookie']
Example #20
def doLogin():

    cookie = Cookie.SimpleCookie()
    #method = urlfetch.POST

    form_fields = {
        "login_useremail": __settings__.getSetting('username'),
        "login_password": __settings__.getSetting('password'),
        "url_refe": "https://www.fshare.vn/index.php"
    }

    form_data = urllib.urlencode(form_fields)

    response = urlfetch.fetch(url='https://www.fshare.vn/login.php',
                              method='POST',
                              headers=headers,
                              data=form_data,
                              follow_redirects=False)

    cookie.load(response.headers.get('set-cookie', ''))
    headers['Cookie'] = _makeCookieHeader(cookie)

    if headers['Cookie'].find('-1') > 0:
        xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % (
            'Login',
            'Login failed. You must input correct FShare username/pass in Add-on settings',
            '15')).encode("utf-8"))
        return False
    else:
        # xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % ('Login', 'Login successful', '15')).encode("utf-8"))
        return headers['Cookie']
Example #21
def check_online(type='vk'):
    logger.debug('Check online')
    users = db(db.auth_user).select()   # all registered users
    for user in users:
        # if db(db.user_extra).select(auth_id=auth.user_id):
        #     pass
        if type == 'vk':
            status = loads(fetch(url='https://api.vk.com/method/users.get?user_ids=%s&fields=online'
                                     %user['vk_uid']).content)['response'][0]['online']
            logger.debug("%s %s"%(user['last_name'], status))
            user_exist = db(db.user_extra.auth_id==user['id']).select()  # number of all exist auth_users in user_extra table
            timeline_table = db(db.timeline.user_extra_id==user['id']).select()
            now_time = datetime.now()
            if status and len(user_exist):
                if  not len(timeline_table) or timeline_table[-1]['end_time']:  # if not exist end_time record
                    logger.debug('Insert')
                    db.timeline.insert(week_day=now_time.strftime('%A %d %b'),
                                       user_extra_id=user['id'],
                                       start_time=now_time.isoformat())
                    db.commit()
                else:
                    continue
            elif len(user_exist):
                if (len(timeline_table) and
                        timeline_table[-1]['start_time'] and
                        not timeline_table[-1]['end_time']):
                    logger.debug('Update')
                    timeline_table[-1].end_time=now_time.isoformat()
                    timeline_table[-1].update_record()
        elif type == 'facebook':
            pass

    return True
Example #22
def queryRdfStore(endPoint, query):

    try:
        url = endPoint + urllib.urlencode({"query" : query})

    except UnicodeEncodeError:
        return ""

    jsonresult = urlfetch.fetch(url,deadline=30,method=urlfetch.GET, headers={"accept" : "application/sparql-results+json"})

    if(jsonresult.status_code == 200):
        res = json.loads(jsonresult.content)
        
        res_var = res['head']['vars']
        
        response = []
        for row in res['results']['bindings']:
            dic = {}
            for var in res_var:
                dic[var] = row[var]['value']
                
            response.append(dic)
                        
        return response    
    else:
        return {"error" : jsonresult.content} 
Example #23
def get_static_google_map(request, filename=None, crop=False):
    response = urlfetch.fetch(request)

    # check for an error (no image at requested location)
    if response.getheader('x-staticmap-api-warning') is not None:
        return None

    try:
        imgdata = cStringIO.StringIO(response.content)
        img = Image.open(imgdata)
    except IOError:
        # print the error (the API may return an image/page describing the error)
        print "IOError:", imgdata.read()
        return None
    else:
        img = np.asarray(img.convert("RGB"))

    # there seems not to be any simple way to check for the gray error image
    # that Google throws when going above the API limit -- so here's a hack.
    if (img == 224).sum() / float(img.size) > 0.95:
        return None

    # remove the Google watermark at the bottom of the image
    if crop:
        img_shape = img.shape
        img = img[:int(img_shape[0] * 0.85), :int(img_shape[1] * 0.85)]

    if filename is not None:
        basedir = os.path.dirname(filename)
        if not os.path.exists(basedir) and basedir not in ["", "./"]:
            os.makedirs(basedir)
        io.imsave(filename, img)
    return img
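
A hypothetical call to get_static_google_map(); the Static Maps request URL, the YOUR_API_KEY placeholder and the output filename are illustrative only.

# Illustrative usage; YOUR_API_KEY is a placeholder, not a real key.
request = ('https://maps.googleapis.com/maps/api/staticmap'
           '?center=48.8584,2.2945&zoom=15&size=512x512&maptype=satellite'
           '&key=YOUR_API_KEY')
img = get_static_google_map(request, filename='tower.png', crop=True)
if img is None:
    print "No usable imagery returned (warning header, quota image, or decode error)"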
Example #24
def getLocInfo(lat, long):

    results = []
    index = 0

    api_key = "AIzaSyDva2nYRJnjiQ-BW-I67_5m7GxA_19gA7Y"

    url = (
        "https://maps.googleapis.com/maps/api/place/search/xml?location="
        + lat
        + ","
        + long
        + "&radius=10000&types=amusement_park|museum|shopping_mall|zoo|point_of_interest&sensor=false&key="
        + api_key
    )

    poiXML = urlfetch.fetch(url, deadline=60, method=urlfetch.GET)

    if poiXML.status_code == 200:

        tree = etree.fromstring(poiXML.content)

        for poi in tree.findall("result"):
            poiName = poi.find("name").text
            results.insert(index, poiName)
            index += 1

        return results

    else:
        print "Something went wrong with the connection to the Google Places server"
        sys.exit()
Example #25
def resolve_url(url):
	if freeAccount == 'true':
		response = urlfetch.fetch("http://feed.hdrepo.com/fshare.php")
		if response.status == 200:
			headers['Cookie'] = response.content
		else:
			xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % ('Login', 'Server only accepts 1 request/minute', '5000')).encode("utf-8"))	 
			return
	else:
		headers['Cookie'] = doLogin()

	response = urlfetch.get(url,headers=headers, follow_redirects=False)
	if response.status==302 and response.headers['location'].find('logout.php')<0:
		url=response.headers['location']
		# logout
		if freeAccount == 'true':
			cookie = Cookie.SimpleCookie()
			cookie.load(response.headers.get('set-cookie', ''))
			headers['Cookie'] = _makeCookieHeader(cookie)
			urlfetch.get("https://www.fshare.vn/logout.php",headers=headers, follow_redirects=False)
	else:
		if response.status==200:
			soup = BeautifulSoup(str(response.content), convertEntities=BeautifulSoup.HTML_ENTITIES)		
			item = soup.find('form', {'name' : 'frm_download'})
			if item:
				url = item['action']
		else:
			xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % ('Login', 'Login failed. You must input correct FShare username/pass in Add-on settings', '5000')).encode("utf-8"))	 
			return
	
	item = xbmcgui.ListItem(path=url)
	xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
Example #26
def getLocation(city):

    api_key = "wV3l1uxVevrxnA6e"

    city = city.replace(" ", "%20")

    url = "http://api.songkick.com/api/3.0/search/locations.xml?query=" + city + "&apikey=" + api_key

    locationXML = urlfetch.fetch(url,deadline=60,method=urlfetch.GET)

    if locationXML.status_code == 200:
        tree = etree.fromstring(locationXML.content)

        metroArea = tree.find('results/location/metroArea')
        locationId = metroArea.attrib['id']
        locationLat = metroArea.attrib['lat']
        locationLong = metroArea.attrib['lng']

        # Return the id, latitude and longitude as a plain list
        location = [locationId, locationLat, locationLong]

        return location

    else:
        # Need better error handling
        print "Location does not exist or something else went wrong with the connection to the Songkick server."
        sys.exit()
Example #27
def get_title_thumbnail_url(title_id, pair=False, default=None):
    """Gets the thumbnail image URL of the title.
    
    :param title_id: the title id
    :type title_id: :class:`int`, :class:`long`
    :param pair: if ``True`` it returns a :class:`tuple`. otherwise it returns
                 just a thumbnail url. ``False`` by default
    :type pair: :class:`bool`
    :param default: a default value used when there's no cache. if not present,
                    gets thumbnail url from web
    :returns: a pair of ``(title_id, thumbnail_url)`` or just a thumbnail url
              if ``pair`` is ``False``
    :rtype: :class:`tuple`, :class:`basestring`

    """
    logger = logging.getLogger(__name__ + '.get_title_thumbnail_url')
    cache_key = 'title_thumbnail_{0}'.format(title_id)
    cached = cache.get(cache_key)
    if cached:
        logger.info('used cached thumbnail of title %d', title_id)
        return (title_id, cached) if pair else cached
    if default is not None:
        return default
    url = URL_TYPES['webtoon'].format(title_id)
    with urlfetch.fetch(url, cache, 120) as f:
        logger.info('downloaded title %d from %s', title_id, url)
        html = lxml.html.parse(f)
        img = html.xpath('//div[@class="thumb"]//img/@src')
        img_src = unicode(img[0])
        cache.set(cache_key, img_src)
        return (title_id, img_src) if pair else img_src
Example #28
def doLogin():

    cookie = Cookie.SimpleCookie()
    # method = urlfetch.POST

    form_fields = {
        "login_useremail": __settings__.getSetting("username"),
        "login_password": __settings__.getSetting("password"),
        "url_refe": "https://www.fshare.vn/index.php",
    }

    form_data = urllib.urlencode(form_fields)

    response = urlfetch.fetch(
        url="https://www.fshare.vn/login.php", method="POST", headers=headers, data=form_data, follow_redirects=False
    )

    cookie.load(response.headers.get("set-cookie", ""))
    headers["Cookie"] = _makeCookieHeader(cookie)
    # cache.set('cookie',headers['Cookie'])

    if headers["Cookie"].find("-1") > 0:
        xbmc.executebuiltin(
            (
                u'XBMC.Notification("%s", "%s", %s)'
                % ("Login", "Login failed. You must input correct FShare username/pass in Add-on settings", "15")
            ).encode("utf-8")
        )
        return False
    else:
        # xbmc.executebuiltin((u'XBMC.Notification("%s", "%s", %s)' % ('Login', 'Login successful', '15')).encode("utf-8"))
        return headers["Cookie"]
Example #29
def getChannels(url):
    cns = []
    result = None
    result = urlfetch.fetch(
        url,
        headers={
            'User-Agent':
            'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36'
        })
    if result.status_code != 200:
        plugin.log.error('Something went wrong while getting the FPT Play channel list!')
        return None
    soup = BeautifulSoup(result.content,
                         convertEntities=BeautifulSoup.HTML_ENTITIES)

    items = soup.findAll('div', {'class': 'hover01'})
    for item in items:

        ac = item.find('a', {'class': 'tv_channel '})

        if ac == None:
            ac = item.find('a', {'class': 'tv_channel active'})
            if ac == None:
                continue

        lock = item.find('img', {'class': 'lock'})

        if lock != None:
            continue

        dataref = ac.get('data-href')

        if dataref == None:
            continue

        img = ac.find('img', {'class': 'img-responsive'})

        imgthumbnail = ''

        if img != None:
            imgthumbnail = img.get('data-original')

        if not dataref.startswith(crawurl):
            continue

        channelid = dataref[27:]

        if not channelid:
            continue

        title = channelid
        cn = {
            'label': title,
            'path': plugin.url_for('plays', id=channelid),
            'thumbnail': imgthumbnail,
            'is_playable': True
        }
        cns.append(cn)
    return cns
Example #30
def get_tx(tx_hash):
  url = BASE_BLOCKCHAIN_URL + '/rawtx/%s' % (tx_hash)
  result = urlfetch.fetch(url)
  if result.status_code == 200:
    return json.loads(result.content)
  else:
    logging.error('There was an error contacting the Blockchain.info API')
    return None
Example #31
 def send_request(self, request):
     resp = urlfetch.fetch(url=request.url,
                           headers=request.headers,
                           method=request.method,
                           data=request.body,
                           deadline=self.config.timeout,
                           **self.options)
     return process_response(resp.status_code, resp.content)
Example #32
def get_remote_bytes(file_url) -> io.BytesIO:
    """
    Download remote file and return its bytes object
    :param file_url: URL to the file
    :return:
    """
    result = urlfetch.fetch(file_url)
    return io.BytesIO(result.content)
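
Because get_remote_bytes() returns an in-memory io.BytesIO, the result can be handed to any reader that expects a file-like object. The URL below is a placeholder, and zipfile is just one example consumer.

# Illustrative usage (Python 3, matching the annotation above).
import zipfile

data = get_remote_bytes("https://example.com/archive.zip")
with zipfile.ZipFile(data) as archive:
    print(archive.namelist())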
Example #33
    def test_fetch_data(self,):
        r = urlfetch.fetch(testlib.test_server_host, data='foo=bar')
        o = json.loads(r.text)

        self.assertEqual(r.status, 200)
        self.assertTrue(isinstance(r.json, dict))
        self.assertTrue(isinstance(r.text, urlfetch.unicode))
        self.assertEqual(o['method'], 'POST')
Example #34
def url_get(url):
    """Retrieve a URL using HTTP GET."""
    if gae_urlfetch:
        logging.debug("url_get(" + url + ") with GAE")
        return gae_urlfetch.fetch(url)
    else:
        logging.debug("url_get(" + url + ") with urlfetch")
        return urlfetch.fetch(url, deadline=DEADLINE_FETCH)
Example #35
def url_get(url):
    """Retrieve a URL using HTTP GET."""
    if gae_urlfetch:
        logging.debug("url_get(" + url + ") with GAE")
        return gae_urlfetch.fetch(url)
    else:
        logging.debug("url_get(" + url + ") with urlfetch")
        return urlfetch.fetch(url, deadline=DEADLINE_FETCH)
Example #36
 def get_html(page):
     if page == last:
         return last_html
     url = url_format.format(page)
     with urlfetch.fetch(url, cache, 120) as f:
         logger.info(url)
         html = lxml.html.parse(f)
     return html
Example #37
    def test_fetch(self):
        r = urlfetch.fetch(testlib.test_server_host)
        o = json.loads(r.text)

        self.assertEqual(r.status, 200)
        self.assertTrue(isinstance(r.json, dict))
        self.assertTrue(isinstance(r.text, urlfetch.unicode))
        self.assertEqual(o['method'], 'GET')
Example #38
def getChannels(url):
    cns = []
    result = None
    result = urlfetch.fetch(
        url,
        headers={
            "User-Agent": "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36"
        },
    )
    if result.status_code != 200:
        plugin.log.error("Something wrong when get list fpt play channel !")
        return None
    soup = BeautifulSoup(result.content, convertEntities=BeautifulSoup.HTML_ENTITIES)

    items = soup.findAll("div", {"class": "hover01"})
    for item in items:

        ac = item.find("a", {"class": "tv_channel "})

        if ac == None:
            ac = item.find("a", {"class": "tv_channel active"})
            if ac == None:
                continue

        lock = item.find("img", {"class": "lock"})

        if lock != None:
            continue

        dataref = ac.get("data-href")

        if dataref == None:
            continue

        img = ac.find("img", {"class": "img-responsive"})

        imgthumbnail = ""

        if img != None:
            imgthumbnail = img.get("data-original")

        if not dataref.startswith(crawurl):
            continue

        channelid = dataref[27:]

        if not channelid:
            continue

        title = channelid
        cn = {
            "label": title,
            "path": plugin.url_for("plays", id=channelid),
            "thumbnail": imgthumbnail,
            "is_playable": True,
        }
        cns.append(cn)
    return cns
Example #39
def get_coords(url):
    import urlfetch
    import urllib2
    try:
        page = urlfetch.fetch(url)
    except urllib2.URLError, e:
        if e.getcode() == 500:
            content = e.read()
        else:
            raise
Example #40
def getChannels(url):
    cns = []
    result = None
    result = urlfetch.fetch(
        url,
        headers={
            'User-Agent':'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36'
            })
    if result.status_code != 200 :
        plugin.log.error('Something went wrong while getting the FPT Play channel list!')
        return None
    soup = BeautifulSoup(result.content, convertEntities=BeautifulSoup.HTML_ENTITIES)

    items = soup.findAll('div', {'class' : 'hover01'})
    for item in items:

        ac = item.find('a', {'class' : 'tv_channel '})

        if ac == None :
            ac = item.find('a', {'class' : 'tv_channel active'})
            if ac == None :
                continue

        lock = item.find('img', {'class' : 'lock'})

        if lock != None :
            continue

        dataref = ac.get('data-href')

        if dataref == None :
            continue

        img = ac.find('img', {'class' : 'img-responsive'})

        imgthumbnail = ''

        if img != None :
            imgthumbnail = img.get('data-original')

        if not dataref.startswith(crawurl) :
            continue

        channelid = dataref[27:]

        if not channelid :
            continue

        title = channelid
        cn = {
                'label': title,
                'path': plugin.url_for('plays', id = channelid),
                'thumbnail':imgthumbnail,
                'is_playable': True
            }
        cns.append(cn)
    return cns
Example #41
def get_txs_for_addr(addr, limit=5):
  url = BASE_BLOCKCHAIN_URL + '/address/%s?format=json&limit=%s' % (addr, limit)
  result = urlfetch.fetch(url)
  if result.status_code == 200:
    j = json.loads(result.content)
    return [(tx['hash'], tx['time']) for tx in j['txs']]
  else:
    logging.error('Error accessing blockchain API: ' + str(result.status_code))
    return None
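
A hypothetical way to combine get_txs_for_addr() with get_tx() from Example #30; the address is illustrative, BASE_BLOCKCHAIN_URL is assumed to point at https://blockchain.info, and the 'out' field is assumed from that API's rawtx JSON.

# Illustrative usage only.
txs = get_txs_for_addr('1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa', limit=3)
for tx_hash, tx_time in (txs or []):
    tx = get_tx(tx_hash)
    if tx is not None:
        print tx_hash, tx_time, len(tx.get('out', [])), 'outputs'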
Example #42
    def test_fetch_data(self, ):
        r = urlfetch.fetch(testlib.test_server_host, data='foo=bar')
        o = json.loads(r.text)

        self.assertEqual(r.status, 200)
        self.assertTrue(isinstance(r.json, dict))
        self.assertTrue(isinstance(r.text, urlfetch.unicode))
        self.assertEqual(r.links, [])
        self.assertEqual(o['method'], 'POST')
Example #43
    def test_fetch(self):
        r = urlfetch.fetch(testlib.url())
        o = json.loads(r.text)

        self.assertEqual(r.status, 200)
        self.assertTrue(isinstance(r.json, dict))
        self.assertTrue(isinstance(r.text, urlfetch.unicode))
        self.assertEqual(r.links, [])
        self.assertEqual(o['method'], 'GET')
Example #44
    def test_fetch(self):
        d = testlib.randdict()
        data = urlfetch.urlencode(d)

        r = urlfetch.fetch(testlib.test_server_host, data=data)
        o = json.loads(r.text)

        self.assertEqual(r.status, 200)
        self.assertEqual(o['method'], 'POST')
        self.assertEqual(o['post'], d)
Example #45
def pub2fanfou(username, password, status):
    # Get the form token
    response = urlfetch.fetch("http://m.fanfou.com/")
    token = re.search('''name="token".*?value="(.*?)"''',
                      response.body).group(1)

    # Log in
    response = urlfetch.fetch("http://m.fanfou.com/",
                              data={
                                  'loginname': username,
                                  'loginpass': password,
                                  'action': 'login',
                                  'token': token,
                                  'auto_login': '******',
                              },
                              headers={
                                  "Referer": "http://m.fanfou.com/",
                              })

    #cookies
    cookiestring = response.cookiestring
    print cookiestring

    # Get the form token
    response = urlfetch.fetch("http://m.fanfou.com/home",
                              headers={
                                  'Cookie': cookiestring,
                                  'Referer': "http://m.fanfou.com/home",
                              })
    token = re.search('''name="token".*?value="(.*?)"''',
                      response.body).group(1)

    # Post the status
    response = urlfetch.fetch("http://m.fanfou.com/",
                              data={
                                  'content': status,
                                  'token': token,
                                  'action': 'msg.post',
                              },
                              headers={
                                  'Cookie': cookiestring,
                                  'Referer': "http://m.fanfou.com/home",
                              })
Example #46
 def update(self, status):
     response = fetch("http://m.fanfou.com/home",
                      headers={
                          'Cookie': self.cookies,
                          'Referer': "http://m.fanfou.com/home",
                      })
     token = re.search('''name="token".*?value="(.*?)"''',
                       response.body).group(1)
     response = fetch("http://m.fanfou.com/",
                      data={
                          'content': status,
                          'token': token,
                          'action': 'msg.post',
                      },
                      headers={
                          'Cookie': self.cookies,
                          'Referer': "http://m.fanfou.com/home",
                      })
     return response.body
Example #47
    def login(self):
        response = fetch("http://m.fanfou.com/")
        token = re.search('''name="token".*?value="(.*?)"''',
                          response.body).group(1)

        response = fetch("http://m.fanfou.com/",
                         data={
                             'loginname': self.username,
                             'loginpass': self.password,
                             'action': 'login',
                             'token': token,
                             'auto_login': '******',
                         },
                         headers={
                             "Referer": "http://m.fanfou.com/",
                         })
        set_cookie = response.getheader('Set-Cookie')
        self.cookies = sc2cs(set_cookie)
        return response.body
Example #48
    def login(self):
        response = fetch("http://m.facebook.com/")

        m = re.search('''name="post_form_id" value="([^"]+?)"''',
                      response.body)
        self.post_form_id = m.group(1)

        response = fetch(
            "https://www.facebook.com/login.php?m=m&refsrc=http%3A%2F%2Fm.facebook.com%2F&refid=0",
            data={
                'lsd': 'off',
                'charset_test': "€,´,€,´,水,Д,Є",
                'version': '1',
                'ajax': '1',
                'width': '1280',
                'pxr': '1',
                'email': self.username,
                'pass': self.password,
                'submit': 'Log In',
                'post_form_id': self.post_form_id,
            },
            headers={
                'Referer': 'http://m.facebook.com/',
            })

        set_cookie = response.getheader('Set-Cookie')
        self.cookies = sc2cs(set_cookie)

        url = response.getheader('location')
        response = fetch(
            url,
            headers={
                'Referer': 'http://m.facebook.com/',
                'Cookie': self.cookies,
            },
        )

        self.post_form_id = re.search(
            '''name="post_form_id" value="([^"]+?)"''', response.body).group(1)
        self.fb_dtsg = re.search('''name="fb_dtsg" value="([^"]+?)"''',
                                 response.body).group(1)

        return response
Example #49
def webtoon_comics():
    with urlfetch.fetch(WEBTOON_LIST_URL, cache, 120) as f:
        html = lxml.html.parse(f)
    links = html.xpath('//*[@id="content"]//*[@class="section"]/ul/li/a')
    for a in links:
        title = a.attrib['title']
        href = a.attrib['href']
        query = href[href.index('?') + 1:]
        title_id = int(werkzeug.urls.url_decode(query)['titleId'])
        yield title_id, title
Example #50
 def send_request(self, request):
   resp = urlfetch.fetch(
     url=request.url,
     headers=request.headers,
     method=request.method,
     data=request.body,
     deadline=self.config.timeout,
     **self.options
   )
   return process_response(resp.status_code, resp.content)
Example #51
def get_block(height):
  if not height:
    return None
  url = BASE_BLOCKCHAIN_URL + '/block-height/%s?format=json' % (height)
  result = urlfetch.fetch(url)
  if result.status_code == 200:
    return json.loads(result.content).get('blocks')[0]
  else:
    logging.error('There was an error contacting the Blockchain.info API')
    return None
Example #52
def get_pdfium_sha(chromium_sha):
    """Gets the correct Pdfium sha using the Chromium sha."""
    response = urlfetch.fetch(
        ('https://chromium.googlesource.com/chromium/src.git/+/%s/DEPS?'
         'format=TEXT' % chromium_sha))
    body = base64.b64decode(response.body)
    sha_line = [l for l in body.split('\n') if "'pdfium_revision':" in l][0]
    sha_line = sha_line.translate(None, string.punctuation).replace(
        'pdfiumrevision', '')
    return sha_line.strip()
Example #53
def submit (recaptcha_challenge_field,
            recaptcha_response_field,
            private_key,
            remoteip):
    """
    Submits a reCAPTCHA request for verification. Returns RecaptchaResponse
    for the request

    recaptcha_challenge_field -- The value of recaptcha_challenge_field from the form
    recaptcha_response_field -- The value of recaptcha_response_field from the form
    private_key -- your reCAPTCHA private key
    remoteip -- the user's ip address
    """

    if not (recaptcha_response_field and recaptcha_challenge_field and
            len (recaptcha_response_field) and len (recaptcha_challenge_field)):
        return RecaptchaResponse (is_valid = False, error_code = 'incorrect-captcha-sol')
    
    headers = {
               'Content-type':  'application/x-www-form-urlencoded',
               "User-agent"  :  "reCAPTCHA GAE Python"
               }         
    
    params = urllib.urlencode({
        'privatekey': private_key,
        'remoteip': remoteip,
        'challenge': recaptcha_challenge_field,
        'response': recaptcha_response_field,
    })

    httpresp = urlfetch.fetch(
                   url      = "http://%s/verify" % VERIFY_SERVER,
                   payload  = params,
                   method   = urlfetch.POST,
                   headers  = headers
                    )     
    
    if httpresp.status_code == 200:
        # response was fine
        
        # get the return values
        return_values = httpresp.content.splitlines();
        
        # get the return code (true/false)
        return_code = return_values[0]
        
        if return_code == "true":
            # yep, filled perfectly
            return RecaptchaResponse (is_valid=True)
        else:
            # nope, something went wrong
            return RecaptchaResponse (is_valid=False, error_code = return_values [1])
    else:
        # recaptcha server was not reachable
        return RecaptchaResponse (is_valid=False, error_code = "recaptcha-not-reachable")
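
A hypothetical call site for submit(), e.g. inside a Google App Engine webapp request handler; RECAPTCHA_PRIVATE_KEY is assumed to be defined elsewhere, and the two field names are the classic reCAPTCHA v1 form fields the function expects.

# Illustrative usage from within a webapp handler method.
resp = submit(self.request.get('recaptcha_challenge_field'),
              self.request.get('recaptcha_response_field'),
              RECAPTCHA_PRIVATE_KEY,
              self.request.remote_addr)
if not resp.is_valid:
    self.response.out.write('reCAPTCHA failed: %s' % resp.error_code)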
Example #54
def sms():
    params = {
        'api_key': 'c76e6310',
        'api_secret': 'e801a86852c6efd7',
        'to': '1484929####',
        'from': '1267405####',
        'text': 'try from pennapps'
    }
    url = 'https://rest.nexmo.com/sms/json?' + urllib.urlencode(params)
    res = urlfetch.fetch(url, method='GET')
    return json.loads(res.content)
Example #55
 def update(self, status):
     response = fetch("http://m.douban.com/",
                      data={
                          'mb_text': status,
                          'session': self.session
                      },
                      headers={
                          'Referer': 'http://m.douban.com/',
                          'Cookie': self.cookies,
                      })
     return response
Example #56
def storeRDF(graph):

    data = graph.serialize(format='xml')
    url = rdfStoreUrl + "/statements"

    jsonresult = urlfetch.fetch(
        url,
        payload=data,
        deadline=30,
        method=urlfetch.POST,
        headers={'content-type': 'application/rdf+xml'})
Example #57
def resolve_url(url):
    headers = HTTP_DESKTOP_UA
    cookie = Cookie.SimpleCookie()

    form_fields = {
        "username": __settings__.getSetting('username'),
        "password": __settings__.getSetting('password')
    }

    form_data = urllib.urlencode(form_fields)

    response = urlfetch.fetch(url='http://up.4share.vn/index/login',
                              method='POST',
                              headers=headers,
                              data=form_data,
                              follow_redirects=False)

    cookie.load(response.headers.get('set-cookie', ''))
    headers['Cookie'] = _makeCookieHeader(cookie)

    response = urlfetch.fetch(url, headers=headers, follow_redirects=True)
    if response.status == 302 and response.headers['location'].find(
            'logout.php') < 0:
        url = response.headers['location']
    else:
        soup = BeautifulSoup(response.content,
                             convertEntities=BeautifulSoup.HTML_ENTITIES)
        for a in soup.findAll('a'):
            try:
                if a['href'].find('?info') > 0:
                    url = a['href']
                    item = xbmcgui.ListItem(path=url)
                    xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
                    return
            except:
                pass

        xbmc.executebuiltin(
            (u'XBMC.Notification("%s", "%s", %s)' %
             ('Authentication', 'Please check your 4share username/password',
              '15')).encode("utf-8"))