# Code example #1 (score: 0)
# File: twitter_tasks.py  Project: kingsBSD/SocialMiner
def doDefaultScrape(latest=False):
    """Retrieve the tweets, friends or followers of the next users in the default scrape.

    Dispatches at most one Celery task per job type (friends, followers,
    tweets), then re-schedules itself to poll again in 30 seconds. The loop
    stops when the 'default_scrape' cache flag is no longer 'true'.

    Args:
        latest: Passed through to whoNext() to select the most recently
            seen users first.

    Returns:
        False when the scrape has been stopped; otherwise None.
    """
    keepGoing = cache.get('default_scrape')
    # Stop (and do not re-schedule) once the flag is cleared or not 'true'.
    if (not keepGoing) or keepGoing != 'true':
        print('*** STOPPED DEFAULT SCRAPE ***')
        return False

    print('*** SCRAPING... ***')

    # Each job type runs one task at a time: the task is expected to set
    # its cache key back to 'done' when it finishes.
    thisFriend = cache.get('scrape_friends')
    if (not thisFriend) or thisFriend == 'done':
        cache.set('scrape_friends', 'running')
        getTwitterConnections.delay(whoNext('friends', latest=latest),
                                    cacheKey='scrape_friends')
    else:
        print('*** FRIENDS BUSY ***')

    thisFollower = cache.get('scrape_followers')
    if (not thisFollower) or thisFollower == 'done':
        cache.set('scrape_followers', 'running')
        # NOTE(review): whoNext('friends') is queried here even for the
        # followers job -- possibly should be 'followers'; confirm against
        # the whoNext() implementation before changing.
        getTwitterConnections.delay(whoNext('friends', latest=latest),
                                    friends=False,
                                    cacheKey='scrape_followers')
    else:
        print('*** FOLLOWERS BUSY ***')

    thisTweet = cache.get('scrape_tweets')
    if (not thisTweet) or thisTweet == 'done':
        cache.set('scrape_tweets', 'running')
        getTweets.delay(whoNext('tweets', latest=latest),
                        maxTweets=1000,
                        cacheKey='scrape_tweets')
    else:
        print('*** TWEETS BUSY ***')

    # Poll again in 30 seconds, preserving the 'latest' setting.
    doDefaultScrape.apply_async(kwargs={'latest': latest}, countdown=30)
# Code example #2 (score: 0)
# File: twitter_tasks.py  Project: picomeg/SocialMiner
def doDefaultScrape(latest=False):
    """Retrieve the tweets, friends or followers of the next users in the default scrape.

    Kicks off one Celery task per idle job type (friends, followers,
    tweets) and re-schedules itself every 30 seconds until the
    'default_scrape' cache flag stops being 'true'.

    Args:
        latest: Forwarded to whoNext() to prefer the most recent users.

    Returns:
        False when the scrape has been stopped; otherwise None.
    """
    keepGoing = cache.get('default_scrape')
    # Stop (and do not re-schedule) once the flag is cleared or not 'true'.
    if (not keepGoing) or keepGoing != 'true':
        print('*** STOPPED DEFAULT SCRAPE ***')
        return False

    print('*** SCRAPING... ***')

    # One task per job type at a time; a finished task resets its cache
    # key to 'done'.
    thisFriend = cache.get('scrape_friends')
    if (not thisFriend) or thisFriend == 'done':
        cache.set('scrape_friends', 'running')
        getTwitterConnections.delay(whoNext('friends', latest=latest),
                                    cacheKey='scrape_friends')
    else:
        print('*** FRIENDS BUSY ***')

    thisFollower = cache.get('scrape_followers')
    if (not thisFollower) or thisFollower == 'done':
        cache.set('scrape_followers', 'running')
        # NOTE(review): whoNext('friends') is queried here even for the
        # followers job -- possibly should be 'followers'; confirm against
        # the whoNext() implementation before changing.
        getTwitterConnections.delay(whoNext('friends', latest=latest),
                                    friends=False,
                                    cacheKey='scrape_followers')
    else:
        print('*** FOLLOWERS BUSY ***')

    thisTweet = cache.get('scrape_tweets')
    if (not thisTweet) or thisTweet == 'done':
        cache.set('scrape_tweets', 'running')
        getTweets.delay(whoNext('tweets', latest=latest),
                        maxTweets=1000,
                        cacheKey='scrape_tweets')
    else:
        print('*** TWEETS BUSY ***')

    # Poll again in 30 seconds, preserving the 'latest' setting.
    doDefaultScrape.apply_async(kwargs={'latest': latest}, countdown=30)
# Code example #3 (score: 0)
# File: twitter_tasks.py  Project: picomeg/SocialMiner
def doUserScrape():
    """Retrieve the next timelines, friends and followers for the next accounts in the user scrape.

    Dispatches one Celery task per idle job type for accounts nearest the
    target user, then re-schedules itself every 30 seconds while any job
    is still 'running'. Clears the scrape flags when everything is done.

    Returns:
        False when the scrape has been stopped; otherwise None.
    """
    keepGoing = cache.get('user_scrape')
    # Stop (and do not re-schedule) once the flag is cleared or not 'true'.
    if (not keepGoing) or keepGoing != 'true':
        print('*** STOPPED USER SCRAPE ***')
        return False

    user = cache.get('scrape_user')
    # NOTE(review): this line was redacted ('******') in the scraped
    # source; reconstructed as concatenation with the user name -- confirm.
    print('*** SCRAPING USER: ' + user + '... ***')

    thisFriend = cache.get('scrape_friends')
    if (not thisFriend) or thisFriend == 'done':
        nextFriends = nextNearest(user, 'friends')
        if nextFriends:  # only dispatch if an account remains to process
            cache.set('scrape_friends', 'running')
            getTwitterConnections.delay(nextFriends, cacheKey='scrape_friends')
    else:
        print('*** FRIENDS BUSY ***')

    thisFollower = cache.get('scrape_followers')
    if (not thisFollower) or thisFollower == 'done':
        nextFollowers = nextNearest(user, 'followers')
        if nextFollowers:
            cache.set('scrape_followers', 'running')
            getTwitterConnections.delay(nextFollowers,
                                        friends=False,
                                        cacheKey='scrape_followers')
    else:
        print('*** FOLLOWERS BUSY ***')

    thisTweet = cache.get('scrape_tweets')
    if (not thisTweet) or thisTweet == 'done':
        nextTweets = nextNearest(user, 'tweets')
        if nextTweets:
            cache.set('scrape_tweets', 'running')
            getTweets.delay(nextTweets,
                            maxTweets=1000,
                            cacheKey='scrape_tweets')
    else:
        print('*** TWEETS BUSY ***')

    # Keep polling while any job is still marked 'running'; otherwise
    # clear the mode flags so a new scrape can start.
    if 'running' in [
            cache.get(k)
            for k in ['scrape_friends', 'scrape_followers', 'scrape_tweets']
    ]:
        doUserScrape.apply_async(countdown=30)
    else:
        cache.set('user_scrape', '')
        cache.set('scrape_mode', '')
        # NOTE(review): also redacted in the scraped source; reconstructed.
        print('*** FINISHED SCRAPING USER: ' + user + ' ***')
# Code example #4 (score: 0)
# File: twitter_tasks.py  Project: kingsBSD/SocialMiner
def doUserScrape():
    """Retrieve the next timelines, friends and followers for the next accounts in the user scrape.

    For each idle job type (friends, followers, tweets) dispatches a Celery
    task for the next account nearest the target user, then re-schedules
    itself every 30 seconds while any job is still 'running'. Clears the
    scrape flags once all jobs report done.

    Returns:
        False when the scrape has been stopped; otherwise None.
    """
    keepGoing = cache.get('user_scrape')
    # Stop (and do not re-schedule) once the flag is cleared or not 'true'.
    if (not keepGoing) or keepGoing != 'true':
        print('*** STOPPED USER SCRAPE ***')
        return False

    user = cache.get('scrape_user')
    # NOTE(review): this line was redacted ('******') in the scraped
    # source; reconstructed as concatenation with the user name -- confirm.
    print('*** SCRAPING USER: ' + user + '... ***')

    thisFriend = cache.get('scrape_friends')
    if (not thisFriend) or thisFriend == 'done':
        nextFriends = nextNearest(user, 'friends')
        if nextFriends:  # only dispatch if an account remains to process
            cache.set('scrape_friends', 'running')
            getTwitterConnections.delay(nextFriends, cacheKey='scrape_friends')
    else:
        print('*** FRIENDS BUSY ***')

    thisFollower = cache.get('scrape_followers')
    if (not thisFollower) or thisFollower == 'done':
        nextFollowers = nextNearest(user, 'followers')
        if nextFollowers:
            cache.set('scrape_followers', 'running')
            getTwitterConnections.delay(nextFollowers, friends=False,
                                        cacheKey='scrape_followers')
    else:
        print('*** FOLLOWERS BUSY ***')

    thisTweet = cache.get('scrape_tweets')
    if (not thisTweet) or thisTweet == 'done':
        nextTweets = nextNearest(user, 'tweets')
        if nextTweets:
            cache.set('scrape_tweets', 'running')
            getTweets.delay(nextTweets, maxTweets=1000,
                            cacheKey='scrape_tweets')
    else:
        print('*** TWEETS BUSY ***')

    # Keep polling while any job is still marked 'running'; otherwise
    # clear the mode flags so a new scrape can start.
    if 'running' in [cache.get(k) for k in
                     ['scrape_friends', 'scrape_followers', 'scrape_tweets']]:
        doUserScrape.apply_async(countdown=30)
    else:
        cache.set('user_scrape', '')
        cache.set('scrape_mode', '')
        # NOTE(review): also redacted in the scraped source; reconstructed.
        print('*** FINISHED SCRAPING USER: ' + user + ' ***')