Example #1
0
def insert_all_users( fname ):
    with setup_mysql_cxn() as cxn:
        cursor = cxn.cursor()
        api_client = twitterMine.setup()
        user_ids = load_user_ids(fname)
        print(user_ids)
        for user in user_ids:
            print(user)
            # Check if we have already downloaded the user description
            cohort_id = user_ids[user]
            download_needed = True
            try: 
                cursor.execute(GET_USER_TEMPL % user)
                results = get_named_rows(cursor)
                if len(results) >= 1:
                    utils.debug("User already fetched.")
                    if results[0]['cohort_id'] != str(cohort_id):
                        utils.debug("Updating Cohort for %s" % user)
                        tmpl = "UPDATE tusers SET cohort_id = %s where twitter_user_id = %s";
                        cursor.execute(tmpl % (cohort_id, user))
                        
                    download_needed = False
            except Exception as dbe:
                utils.debug("dbError looking up %s" % str(user)) 
                utils.debug("Info: %s" % str(dbe))
            # Otherwise download and insert
            if download_needed: 
                download_and_insert(api_client, cursor, user, cohort_id)
            cxn.commit()
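
Example #1 leans on several project helpers that are not shown here. A minimal sketch of what they could look like, assuming load_user_ids reads a "twitter_user_id,cohort_id" CSV, get_named_rows zips rows with cursor.description, and setup_mysql_cxn wraps mysql.connector -- all three are reconstructions, not the project's actual code:

import contextlib
import csv
import mysql.connector

@contextlib.contextmanager
def setup_mysql_cxn():
    # Hypothetical: yield a connection and always close it afterwards.
    # Connection parameters here are placeholders.
    cxn = mysql.connector.connect(host='localhost', user='user',
                                  password='password', database='twitter')
    try:
        yield cxn
    finally:
        cxn.close()

def load_user_ids(fname):
    # Hypothetical: map each twitter_user_id to its cohort_id.
    user_ids = {}
    with open(fname, 'r') as f:
        for row in csv.reader(f):
            if len(row) >= 2:
                user_ids[row[0].strip()] = row[1].strip()
    return user_ids

def get_named_rows(cursor):
    # Hypothetical: return the fetched rows as dicts keyed by column name.
    columns = [col[0] for col in cursor.description]
    return [dict(zip(columns, row)) for row in cursor.fetchall()]
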
Example #2
def download_neighborhood( id_file, out_file, bfs_depth ):
	print( " building neighborhood network " )

	ids = None
	with open(id_file, 'r') as id_f:
		ids = [ x.strip() for x in id_f if len(x.strip()) > 0 ] 
	
	if ids is None or len(ids) == 0:
		print ("could not read %s" % id_file)
		sys.exit(1)
	
	print( "querying twitter for user descriptions. This may take a while." )
	
	twitter_client = twitterMine.setup() 
	old_results = {}
	if os.path.isfile(out_file):
		with open(out_file, r) as out_f:
			old_results = json.load(out_f)
	results = bfs( twitter_client, ids, bfs_depth, old_results ); 

	print( "done! writing results to %s" % out_file )
	
	with open( out_file ) as out_f:
		json.dump( results, out_f )

	print( "success! exiting" )
Example #3
0
def insert_all_timelines(fname):
    with setup_mysql_cxn() as cxn:
        cursor = cxn.cursor()
        api_client = twitterMine.setup()
        user_ids = load_user_ids(fname)

        for user in user_ids:
            user_loaded = None
            try: 
                cursor.execute("""SELECT loaded FROM 
                    tusers where twitter_user_id = '%s'""" % user)
                user_loaded = get_named_rows(cursor)
            except Exception as e:
                utils.debug("error looking up %s" % user)
                utils.debug("Error message: %s" % str(e))
                user_loaded = []
            
            if len(user_loaded) == 0:
                utils.debug("Error -- user not already fetched %s" % user)
                continue
            elif user_loaded[0]['loaded'] == 1:
                utils.debug("%s, already loaded. Skipping" % user)
                continue
            else:
                try:
                    load_user_timeline(cursor, api_client, user)
                except twython.TwythonAuthError as e:
                    utils.debug('not authed, skipping %s' % user )
                except twython.TwythonError as e:
                    utils.debug("Error: twython error: %s" % str(e))
                    utils.debug("Sleeping 10m")
                    time.sleep(10 * 60)
                    api_client = twitterMine.setup()
                    try:
                        load_user_timeline(cursor, api_client, user)
                    except twython.TwythonAuthError as e:
                        utils.debug('not authed, skipping %s' % user )
                    except twython.TwythonError as e:
                        utils.debug("Error: twython error: %s" % str(e))
                        utils.debug("giving up")
            cxn.commit()
        cursor.close()
        cxn.commit()
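
The nested retry in Example #3 (sleep ten minutes, rebuild the client, try once more, then give up) could be factored into a small wrapper. A sketch of that refactor, reusing the load_user_timeline, utils.debug, and twitterMine.setup helpers from the original; the wrapper itself is hypothetical:

import time
import twython

def load_with_retry(cursor, api_client, user, retries=1):
    # Hypothetical wrapper with the same back-off as the inline handlers
    # above: sleep ten minutes, rebuild the client, retry once, then give up.
    for attempt in range(retries + 1):
        try:
            load_user_timeline(cursor, api_client, user)
            return api_client
        except twython.TwythonAuthError:
            utils.debug('not authed, skipping %s' % user)
            return api_client
        except twython.TwythonError as e:
            utils.debug("Error: twython error: %s" % str(e))
            if attempt == retries:
                utils.debug("giving up")
                return api_client
            utils.debug("Sleeping 10m")
            time.sleep(10 * 60)
            api_client = twitterMine.setup()
    return api_client

With this helper, the else branch in Example #3 would reduce to api_client = load_with_retry(cursor, api_client, user), keeping any rebuilt client for the remaining users.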