Example #1
def get_application_access_token(app_id, app_secret):
    ''' Get an application access token, as described here:
        http://developers.facebook.com/docs/authentication/#authenticating-as-an-application

        Some Graph API calls require one of these.
    '''
        
    url = FB_GRAPH_BASE_URL
    url += "/oauth/access_token?"
    url += "client_id=%s" % app_id
    url += "&client_secret=%s" % app_secret
    url += "&grant_type=client_credentials"
    
    s = net.get(url)
    
    if s:
        # the body of the response should be "access_token=<token>"
        prefix = "access_token="
        if not s.startswith(prefix):
            logger.error("Got malformed app access token response: '%s'" % s)
            return None
        access_token = s[len(prefix):]
        logger.debug("Got application access token: %s" % access_token)
        return access_token

    else:
        logger.error("Failed to get application access token")
        return None
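A hypothetical usage sketch for the helper above; the credentials are placeholders, and logger and FB_GRAPH_BASE_URL are assumed to come from this module:

# hypothetical usage; APP_ID and APP_SECRET are placeholders, not real credentials
APP_ID = "123456789"
APP_SECRET = "0123456789abcdef"
token = get_application_access_token(APP_ID, APP_SECRET)
if token:
    logger.info("acquired app access token")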
Example #2
    def fetch(self, operation, operation_params):
        ''' fetch document, return file-like object as response '''
        
        url = self.construct_url(operation, operation_params)
        
        if self.printurl:
            logger.info(url)

        # Google App Engine supports Python 2.7 as of release 1.6.0, but there
        # is an outstanding bug (#6271) in its standard-library facade over the
        # urlfetch service, so use urlfetch directly when running on GAE.
        f = net.get(url)
        return f
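For reference, a minimal sketch of the GAE-aware fetching the comment alludes to; the environment check and the urllib2 fallback are assumptions about how a net.get like this might be built, not this module's actual implementation:

import os

def gae_aware_get(url):
    # App Engine sets SERVER_SOFTWARE in the request environment
    if os.environ.get("SERVER_SOFTWARE", "").startswith("Google App Engine"):
        from google.appengine.api import urlfetch  # available on GAE only
        result = urlfetch.fetch(url)
        return result.content if result.status_code == 200 else None
    import urllib2  # Python 2, matching the era of this code
    return urllib2.urlopen(url).read()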
Example #3
def get_fql(url):
    """ Generic helper to execute an FQL request and handle error codes.  """

    # record time to get FQL data.
    t1 = time.time()

    s = net.get(url)

    if not s:
        raise NetException("Failed to execute FQL request, url=%s" % url)

    # should have json.  check for error or success now.
    o = json.loads(s)

    # logger.info("****FQL:")
    # logger.info(o)
    # logger.info("*************")

    # Type checking here is crude, but FQL returns a JSON dictionary on error
    # and potentially a list otherwise.  If it's not a dictionary, skip
    # further error checking.

    if isinstance(o, dict):
        # a missing "error_code" key means the query succeeded
        if "error_code" in o:
            error_code = o["error_code"]
            error_msg = o.get("error_msg", "")

            if error_code == 190:  # invalid OAuth 2.0 token
                raise InvalidOAuthException("Invalid auth: error_msg='%s', url=%s" % (error_msg, url))
            else:
                raise Exception("FQL error code: %d, error_msg='%s', url=%s" % (error_code, error_msg, url))

    # success

    t2 = time.time()
    x = t2 - t1
    logger.info("get_fql: took %0.2f seconds" % x)

    return o
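A hedged usage example: FQL queries of this era were URL-encoded and sent to the fql.query REST endpoint; the endpoint URL and token handling below are assumptions, not taken from this module:

import urllib

access_token = "ACCESS_TOKEN"  # placeholder
query = "SELECT uid, name FROM user WHERE uid = me()"
fql_url = "https://api.facebook.com/method/fql.query?format=json&query=%s&access_token=%s" % (
    urllib.quote(query), access_token)
rows = get_fql(fql_url)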
Example #4
def get_wall_posts(user_id, access_token):
    ''' Requires the read_stream permission to get non-public posts.
    
        Should return a json dictionary containing:
            data - list of posts (maybe up to 50)
            paging - links to get more "pages" of posts
    '''
    
    url = "FB_GRAPH_BASE_URL"
    url += "/%s/feed?access_token=%s" % (user_id, access_token)
    s = net.get(url)
    logger.info("Got user's posts: %s" % s)
    if s:
        o = json.loads(s)
        return o
    else:
        return None
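The paging links mentioned in the docstring can be followed to collect several pages; a sketch assuming the usual Graph API {"data": [...], "paging": {"next": ...}} response shape:

def get_all_wall_posts(user_id, access_token, max_pages=5):
    # hypothetical helper built on get_wall_posts; max_pages is an arbitrary cap
    posts = []
    page = get_wall_posts(user_id, access_token)
    while page and page.get("data") and max_pages > 0:
        posts.extend(page["data"])
        next_url = page.get("paging", {}).get("next")
        if not next_url:
            break
        s = net.get(next_url)
        page = json.loads(s) if s else None
        max_pages -= 1
    return posts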
Example #5
def delete_test_user(uid, access_token):
    ''' Delete the test user '''
    
    url = FB_GRAPH_BASE_URL
    url += "/%s?method=delete&access_token=%s" % (uid, access_token)
    
    s = net.get(url)
    
    if s:
        if s == 'true':
            logger.debug("Deleted test user")
            return True
        else:
            logger.error("Failed to delete test user, response='%s'" % s)
            return False
    else:
        logger.error("Failed to delete test user")
        return False
Example #6
def list_test_users(app_id, app_access_token):
    ''' List test users associated with this application '''
    
    url = "FB_GRAPH_BASE_URL"
    url += "/%s/accounts/test-users" % app_id
    url += "?access_token=%s" % app_access_token
        
    s = net.get(url)
    
    if s:
        logger.debug("List test users, response: '%s'" % s)
        o = json.loads(s)
        users = o["data"]
        return users
        
    else:
        logger.error("Failed to list test users")
        return None
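Combining the two test-user helpers gives a cleanup routine; this assumes each record returned by the test-users endpoint carries "id" and "access_token" fields, as it did at the time:

def delete_all_test_users(app_id, app_access_token):
    # hypothetical helper built on list_test_users and delete_test_user
    for user in list_test_users(app_id, app_access_token) or []:
        delete_test_user(user["id"], user["access_token"])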
Example #7
    def _crawl(self):
        item = self._site["task"]
        md5 = item["md5"]
        url = item["url"]
        last_modified = item["last_modified"]

        headers = {}
        # if last_modified is set and the page was crawled before, send it
        # back in the Last-Modified request header
        if item["state"] == CONFIG.G_STATUS_CRAWLED and last_modified != "":
            headers["Last-Modified"] = last_modified
        (head, html) = net.get(url, headers, d_config=self._site["config"])
        self._save_html(md5, html)

        if int(head["code"]) == 200:
            ret = self._analyze_html(url, html)
            self._update_state(CONFIG.G_STATUS_CRAWLED, {"md5": md5})
            return ret
        else:
            self._update_state(CONFIG.G_STATUS_ERROR, {"md5": md5})
            return "spider.run: crawl %s %s" % (head["code"], url)
Example #8
def get_friends_list(user_id, access_token, fields=["id"], offset=0, limit=None):
    ''' Get list of the user's friends
    
        fields - list of fields to retrieve. e.g. (name, id)
    '''
    fields = ",".join(fields)
    
    url = "FB_GRAPH_BASE_URL"
    url += "/%s/friends?access_token=%s&fields=%s" % (user_id, access_token, fields)
    if offset:
        url += "&offset=%d" % offset
    if limit:
        url += "&limit=%d" % limit
        
    s = net.get(url)

    if s:
        o = json.loads(s)
        return o["data"]    # data element contains a list of dictionary objects repesenting the friends
    else:
        return None
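A sketch of paging through a large friends list with the offset/limit parameters above; the batch size and the assumption that offset-based paging is honored are mine:

def get_all_friends(user_id, access_token, batch=500):
    # hypothetical helper built on get_friends_list
    all_friends = []
    offset = 0
    while True:
        chunk = get_friends_list(user_id, access_token,
                                 fields=["id", "name"],
                                 offset=offset, limit=batch)
        if not chunk:
            break
        all_friends.extend(chunk)
        if len(chunk) < batch:
            break
        offset += batch
    return all_friends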
Example #9
    def _run(self, task_data):
        url = task_data["url"]
        domain = url.split("/")[2]
        d_config = CONFIG.G_SITE[domain]
        default_code = d_config["default_code"]

        html = None
        if CONFIG.G_IFSAVE_HTML:
            html = self._html_from_db(task_data["md5"], CONFIG.G_TABLE_HTML)
            if html:
                default_code = "utf-8"

        if not html:
            (header, html) = net.get(url, d_config=d_config)
            if header["code"] != 200:
                self._pick_state(task_data["md5"],
                                 CONFIG.G_STATE_ERROR, CONFIG.G_TABLE_LINK)
                return (CONFIG.G_STATE_NET_ERROR, (0, 0))

        count = self._pick(d_config, html, task_data, default_code)
        return (CONFIG.G_STATE_PICKED, count)
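For orientation, an illustrative shape of the CONFIG.G_SITE entry this method reads; only "default_code" actually appears in the code above, so every other detail is a guess:

G_SITE = {
    "www.example.com": {
        "default_code": "gbk",  # fallback charset for decoding this site's pages
        # plus whatever site-specific options net.get consumes via d_config
    },
}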
Example #10
def get_users(user_ids, access_token, fields=None, callback=None):
    ''' Get info about a list of users 
    
        callback - if specified, the request is made asynchronously and a handle to the RPC object is returned
    '''
    
    user_ids = ",".join(user_ids)
    
    if fields:
        fields = ",".join(fields)
    else:
        fields = USER_COLUMN_CLAUSE
    
    url = "FB_GRAPH_BASE_URL"
    url += "/?ids=%s&access_token=%s&fields=%s" % (user_ids, access_token, fields)

    logger.debug("get_users: url=%s" % url)
    
    def result_callback(result):
        if result:
            o = json.loads(result)
            return callback(o)
        else:
            return callback(None)

    if callback:
        # async
        rpc = net.get_async(url, result_callback)
        return rpc
    else:
        # blocking
        s = net.get(url)
    
        if s:
            o = json.loads(s)
            return o
        else:
            return None
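Usage sketch for the asynchronous path; it assumes net.get_async hands back a GAE urlfetch-style RPC object whose wait() blocks until the callback has run:

def on_users(users):
    if users:
        for uid, info in users.items():
            logger.info("user %s: %s" % (uid, info.get("name")))

rpc = get_users(["4", "6"], access_token, callback=on_users)
# do other work here, then block until the result arrives:
rpc.wait()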
Example #11
def authenticate_app(app_id, app_secret, redirect_url, code):
    ''' Authenticate app.  (last step of server-side flow authentication process) '''
    
    url = "%s/oauth/access_token?client_id=%s&redirect_uri=%s&client_secret=%s&code=%s" % (FB_GRAPH_BASE_URL, app_id, redirect_url, app_secret, code)
    
    logger.info("autenticate_app: code='%s'" % code)    
    logger.info("authenticate_app: url=%s" % url)
    
    s = net.get(url)
    logger.info("authenticate_app: Got '%s'" % s)

    if not s:
        raise Exception("authenticate_app: empty response, url=%s" % url)

    # s is a string of the form:
    # 'access_token=AAAENJAf4kUUBAFnyAOxBpLD2O5nJvG5eD4X2ZBKAaZAl23ZA82qExyRZAtO6yILsVgyw963UPvfiOG9wUiHRCiyK5MXOSO5gbZBBdfaPSPu7liNJA9pG7&expires=5242'
    
    # split on & to divide up the 2 useful bits.
    (atok, etok) = s.split("&")
    
    # split each on the equals:
    access_token = atok.split("=")[1]
    expires = etok.split("=")[1]
    
    # expires == number of seconds until token expires.
    return (access_token, expires)
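A more defensive way to parse that response body is the stdlib query-string parser; a sketch only, not what the module does:

import urlparse  # urllib.parse on Python 3

params = urlparse.parse_qs(s)
access_token = params["access_token"][0]
expires = params.get("expires", ["0"])[0]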
Example #12
    def _run(self, task_data):
        url = task_data["url"]
        domain = url.split("/")[2]
        d_config = CONFIG.G_SITE[domain]
        default_code = d_config["default_code"]

        html = None
        if CONFIG.G_IFSAVE_HTML:
            html = self._html_from_db(task_data["md5"], CONFIG.G_TABLE_HTML)
            if html:
                default_code = "utf-8"

        if not html:
            (header, html) = net.get(url, d_config=d_config)
            if header["code"] != 200:
                self._pick_state(task_data["md5"],
                                 CONFIG.G_STATE_ERROR, CONFIG.G_TABLE_LINK)
                return (CONFIG.G_STATE_NET_ERROR, (0, 0))

        self.ext_before_pick(html, url)
        count = self._pick(d_config, html, task_data, default_code)
        return (CONFIG.G_STATE_PICKED, count)
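The extra ext_before_pick call is the only difference from the _run in Example #9 and looks like a subclass hook; a hypothetical override (the Spider base-class name is invented here):

class MySpider(Spider):
    def ext_before_pick(self, html, url):
        # e.g. strip scripts or record the URL before field extraction runs
        pass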
Example #13
from win10toast import ToastNotifier
import stocks.company
import net

base_url = 'http://www.tsetmc.com/tsev2/data/instinfodata.aspx?i={id}&c=67%20'
comLis = stocks.company.load_all()

toaster = ToastNotifier()
f = open('safe_res.txt', 'w', encoding='utf-8')

while True:
    for v in comLis:
        addr = base_url.format(id=v.id)
        res = net.get(addr)
        vec_data = res.text.split(',')
        #
        if len(vec_data) < 4:
            continue
        #
        yesterday_price = int(vec_data[5])
        last_price = int(vec_data[3])
        if last_price <= yesterday_price:
            continue
        current_price = int(vec_data[2])
        ratio = current_price / last_price - 1
        if ratio < -0.04:
            # current price has fallen more than 4% below the last trade: notify
            #f.write(str(ratio) + ' : ' + v.name + '\n')
            #f.flush()
            toaster.show_toast("IR-Market", v.name)
            print('--------------')
        print(v.id, '|', ratio, ':', last_price, current_price)
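The polling loop above never pauses; a small, assumed addition at the end of each sweep keeps it from hammering the endpoint (the 60-second interval is an arbitrary choice):

import time

# at the end of each full pass over comLis:
time.sleep(60)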
Example #14
import net

url_tmpl = 'https://rahavard365.com/asset/{}/%D8'

li = []

c = 0
for i in range(4, int(2e4)):
    res = net.get(url_tmpl.format(i)).text
    try:
        loc = res.find('last_pb')
        if loc != -1:
            loc_e = res.find('date', loc)
            # slice the P/B value out from between 'last_pb' and 'date'
            pb = float(res[loc + 18:loc_e - 2])
            li.append((i, pb))
    except Exception:
        print('err at', i)
    c += 1
    if c == 100:
        print('.')
        c = 0
li.sort(key=lambda x: x[1])
print(li)

with open('res.txt', 'w') as f:
    col_width = max(len(str(word)) for row in li
                    for word in row) + 2  # padding
    for tup in li:
        f.write("".join(str(word).ljust(col_width)
                        for word in tup[:-1]) + str(tup[-1]) + '\n')
Example #15
def had_constant_price(prices) -> bool:
    if len(prices) == 0:
        return True
    pre_price = prices[0]
    for price in prices:
        if price != pre_price:
            return False
        pre_price = price
    return True

final_list = []

for com in comLis:
    try:
        # tset
        tset_vec_data = net.get(tset_base_url.format(id=com.id)).text.split(',')
        last_price = int(tset_vec_data[3])
        yesterday_price = int(tset_vec_data[5])
        # tadbirrlc
        url = base_url.format(symbol=com.symbol)
        prices = [vec[3] for vec in net.get(url).json()['olst'][5]]
        if had_constant_price(prices):
            print(com.symbol)
            final_list.append((last_price/yesterday_price, com))
    except KeyboardInterrupt:
        exit()
    except Exception:
        continue

final_list.sort(key=lambda x: x[0])
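had_constant_price can also be expressed with all(), which returns True for an empty sequence and so matches the original's behavior exactly:

def had_constant_price(prices) -> bool:
    # all() over an empty generator is True, so empty price lists still pass
    return all(p == prices[0] for p in prices)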