Example #1
# Python 2 snippet; it relies on urllib/urllib2/cookielib and on the project's
# FileUtility helper module.
import os
import json
import urllib
import urllib2
import cookielib

import FileUtility


def fetch_core(uid, access_token):

    uid = str(uid)

    # Build an opener that keeps cookies and identifies the crawler
    CHandler = urllib2.HTTPCookieProcessor(cookielib.CookieJar())
    browser = urllib2.build_opener(CHandler)
    browser.addheaders = [('User-agent', 'InFB - [email protected] - http://ruel.me')]
    urllib2.install_opener(browser)

    # print 'Using access token: %s\n' % access_token

    # Fetch the user's profile from the Graph API and store it as JSON
    url = 'https://graph.facebook.com/%s?access_token=%s' % (uid, access_token)
    print url
    res = browser.open(url)
    fres = res.read()
    jdata = json.loads(fres)
    fres = json.dumps(jdata, ensure_ascii=False)

    FileUtility.write('data/profile/%s.json' % uid, fres)

    # Fetch the profile picture (large and default size) unless already saved
    if not os.path.isfile('data/profile/%s_picture_large.png' % uid):
        f = open('data/profile/%s_picture_large.png' % uid, 'wb')
        f.write(urllib.urlopen('https://graph.facebook.com/%s/picture?type=large' % uid).read())
        f.close()

    if not os.path.isfile('data/profile/%s_picture.png' % uid):
        f = open('data/profile/%s_picture.png' % uid, 'wb')
        f.write(urllib.urlopen('https://graph.facebook.com/%s/picture' % uid).read())
        f.close()
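
A minimal invocation sketch, assuming the data/profile/ directory already exists; the user id and access token below are placeholders, not real values.

# Hypothetical call: fetch one user's profile JSON and pictures.
fetch_core('1234567890', 'ACCESS_TOKEN_HERE')
# Writes data/profile/1234567890.json plus the two picture files.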
Example #2
# Python 2 snippet; it relies on urllib2/cookielib and on the project's
# FileUtility and FetchPostsDetail modules.
import json
import urllib2
import cookielib

import FileUtility
import FetchPostsDetail


def fetch_core(file_dir, url, access_token):

    # Build an opener that keeps cookies and identifies the crawler
    CHandler = urllib2.HTTPCookieProcessor(cookielib.CookieJar())
    browser = urllib2.build_opener(CHandler)
    browser.addheaders = [('User-agent', 'InFB - [email protected] - http://ruel.me')]
    urllib2.install_opener(browser)

    # print 'Using access token: %s\n' % access_token

    # First page of the Graph API connection, 100 entries per page
    url = 'https://graph.facebook.com/' + url + ('?limit=100&access_token=%s' % access_token)
    print url
    res = browser.open(url)
    fres = res.read()
    jdata = json.loads(fres)
    fres = json.dumps(jdata, ensure_ascii=False)

    i = 1
    FileUtility.write(file_dir + '/data' + str(i) + '.json', fres)

    # Follow the 'paging.next' links until a page comes back empty
    while len(jdata['data']) > 0:

        # Fetch the details (content, likes, comments) of every post on this page
        for sub_data in jdata['data']:
            pid = sub_data['id']
            FetchPostsDetail.fetch_core(pid, access_token, browser)

        # Stop when the API no longer advertises a next page
        if 'paging' not in jdata or 'next' not in jdata['paging']:
            break

        url = jdata['paging']['next']
        print url
        res = browser.open(url)
        fres = res.read()
        jdata = json.loads(fres)
        fres = json.dumps(jdata, ensure_ascii=False)

        i = i + 1
        FileUtility.write(file_dir + '/data' + str(i) + '.json', fres)
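
A minimal invocation sketch, assuming file_dir already exists (or that FileUtility.write creates it) and that the url argument is a Graph API connection path such as '<node id>/feed'; all values below are placeholders.

# Hypothetical call: crawl a feed page by page and fetch each post's details.
fetch_core('data/feed', '1234567890/feed', 'ACCESS_TOKEN_HERE')
# Writes data/feed/data1.json, data/feed/data2.json, ... until the feed is exhausted.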
Example #3
# Python 2 snippet from FetchPostsDetail.py; it relies on the project's
# FileUtility module and on a try_brower() helper defined elsewhere in the
# same module (not shown here).
import os
import json

import FileUtility


def fetch_core(post_id, access_token, browser):

    # One directory per post; skip posts that have already been fetched
    if not os.path.isdir('data/posts/' + post_id):
        os.mkdir('data/posts/' + post_id)
    else:
        print 'Directory already exists, skipping!'
        return

    print 'In FetchPostsDetail.py, parsing ' + post_id

    # Post content
    url = 'https://graph.facebook.com/' + post_id + ('?access_token=%s' % access_token)
    res = try_brower(browser, url)
    fres = res.read()
    jdata = json.loads(fres)
    fres = json.dumps(jdata, ensure_ascii=False)
    FileUtility.write('data/posts/' + post_id + '/content.json', fres)

    # Likes on the post
    url = 'https://graph.facebook.com/' + post_id + '/likes' + ('?access_token=%s' % access_token)
    res = try_brower(browser, url)
    fres = res.read()
    jdata = json.loads(fres)
    fres = json.dumps(jdata, ensure_ascii=False)
    FileUtility.write('data/posts/' + post_id + '/likes.json', fres)

    # Comments on the post
    url = 'https://graph.facebook.com/' + post_id + '/comments' + ('?access_token=%s' % access_token)
    res = try_brower(browser, url)
    fres = res.read()
    jdata = json.loads(fres)
    fres = json.dumps(jdata, ensure_ascii=False)
    FileUtility.write('data/posts/' + post_id + '/comments.json', fres)
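
A minimal invocation sketch, assuming the data/posts/ directory already exists and passing an opener like the one built in Example #2; the post id and access token are placeholders.

# Hypothetical call: fetch the content, likes and comments of one post.
import urllib2, cookielib
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookielib.CookieJar()))
fetch_core('1234567890_987654321', 'ACCESS_TOKEN_HERE', opener)
# Writes content.json, likes.json and comments.json under data/posts/1234567890_987654321/.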