def load_tweets():
    """Fetch recent EWB-related tweets from the Twitter v1.1 search API
    and store them as Tweet objects.

    Retweets ("RT @user: ...") are matched back to the original stored
    Tweet when possible and counted via tweet.retweet() instead of
    creating a duplicate record.

    Returns False on a non-200 API response; otherwise None.
    """
    # Only fetch tweets newer than the latest one already stored.
    since = Tweet.objects.all().aggregate(Max('twitter_id'))
    if since:
        since = since['twitter_id__max']

    # Twitter API v1.1 requires an OAuth-signed client.
    consumer = oauth2.Consumer(key=settings.TWITTER_CONSUMER_KEY,
                               secret=settings.TWITTER_CONSUMER_SECRET)
    token = oauth2.Token(key=settings.TWITTER_TOKEN_KEY,
                         secret=settings.TWITTER_TOKEN_SECRET)
    conn = oauth2.Client(consumer, token)

    # @ewb / #ewb mentions plus anything from the two official accounts.
    params = {'q': '@ewb OR #ewb OR from:ewb OR from:worldofewb'}
    params['result_type'] = 'recent'    # want all recent tweets, not only popular ones
    params['count'] = 100
    if since:
        params['since_id'] = since      # since is a tweet ID

    status, data = conn.request(
        'https://api.twitter.com/1.1/search/tweets.json?' + urllib.urlencode(params),
        'GET')
    # httplib2-style response dict: the status value is a string.
    if status['status'] != "200":
        return False

    result = json.loads(data)

    # Process oldest-first so later tweets overwrite "latest" timestamps.
    result['statuses'].reverse()
    for r in result['statuses']:
        # Twitter's created_at is UTC; shift into server-local time.
        # NOTE(review): naive offset math — assumes a constant UTC offset.
        tzdelta = datetime.now() - datetime.utcnow()
        parsed_date = datetime.strptime(r['created_at'],
                                        '%a %b %d %H:%M:%S +0000 %Y') + tzdelta

        tweet = None

        # Retweet? See what we can find...!
        text = r['text']
        if text[0:4] == 'RT @':
            try:
                original_tweeter = text.split(':')[0].split('@')[1].strip()
                original_tweet = text.split(':', 1)[1].strip()
            except IndexError:
                # mis-formed retweets (no colon after the username)
                original_tweeter = text.split('@')[1].split(' ', 1)[0].strip()
                original_tweet = text.split('@')[1].split(' ', 1)[1].strip()

            tweet = get_object_or_none(Tweet,
                                       author_username=to_ascii(original_tweeter),
                                       text=to_ascii(original_tweet))

        if tweet:
            # Known original: bump its retweet info instead of duplicating.
            tweet.retweet(r)
        else:
            tweet, created = Tweet.objects.get_or_create(
                twitter_id=r['id_str'],
                defaults={'text': r['text'],
                          'author_name': r['user']['name'],
                          'author_username': r['user']['screen_name'],
                          'author_userid': r['user']['id'],
                          'author_image': r['user']['profile_image_url'],
                          'date': parsed_date})
            if created:
                # force creation of container...
                container = Cheers.objects.get_container(tweet)
                container.count = 1
                container.latest = parsed_date
                container.save()
def run(): mc = MailChimp(key) list = settings.MAILCHIMP_LISTID # ---------------------------------------------------------------------- # handle unsubscribes first emails = [] unsub = ListEvent.objects.filter(subscribe=False) for u in unsub: print "unsubscribing", to_ascii(u.user.visible_name()), u.email emails.append(u.email) u.delete() if len(emails): result = mc.listBatchUnsubscribe(id=list, emails=emails, delete_member=True, send_goodbye=False, send_notify=False) print_result(result) # ---------------------------------------------------------------------- # subscribe new people # (actually, this should never be used... since new subscriptions have # been rolled into ProfileEvents) emails = [] sub = ListEvent.objects.filter(subscribe=True) for s in sub: print "subscribing", to_ascii(s.user.visible_name()), s.user.email entry = build_profile(s.user) entry['GROUPINGS'] = build_new_groups(s.user) emails.append(entry) s.delete() if len(emails): result = mc.listBatchSubscribe(id=list, batch=emails, double_optin=False, update_existing=False) print_result(result) # ---------------------------------------------------------------------- # profile info updates # handle email address changes separately, since we can't batch those profile = ProfileEvent.objects.filter(email__isnull=False) for p in profile: if p.email: print "updating with new email", to_ascii(p.user.visible_name()), p.email, p.user.email entry = build_profile(p.user) entry['GROUPINGS'] = build_new_groups(p.user) p.delete() result = mc.listSubscribe(id=list, email_address=p.email, merge_vars=entry, double_optin=False, send_welcome=False, update_existing=True, replace_interests=False) print result # and everything else profile = ProfileEvent.objects.all() for p in profile: print "updating", to_ascii(p.user.visible_name()), p.user.email entry = build_profile(p.user) entry['GROUPINGS'] = build_new_groups(p.user) emails.append(entry) p.delete() if len(emails): result = mc.listBatchSubscribe(id=list, 
batch=emails, double_optin=False, update_existing=True, replace_interests=False) print_result(result) # ---------------------------------------------------------------------- # group joins emails = {} join = GroupEvent.objects.filter(join=True) for j in join: print to_ascii(j.user.visible_name()), j.user.email, "joining", fix_encoding(j.group.name) # if they're not already on the list, build a profile for them if not emails.has_key(j.user.id): emails[j.user.id] = build_profile(j.user) emails[j.user.id]['GROUPINGS'] = [] # add this group to the user's list of groups emails[j.user.id]['GROUPINGS'] = add_group(j.group, emails[j.user.id]['GROUPINGS']) # ok, done. j.delete() if len(emails): result = mc.listBatchSubscribe(id=list, batch=emails.values(), double_optin=False, update_existing=True, replace_interests=False) print_result(result) # ---------------------------------------------------------------------- # group leaves emails = {} leave = GroupEvent.objects.filter(join=False) for l in leave: print to_ascii(l.user.visible_name()), l.user.email, "leaving", fix_encoding(l.group.name) # if they're not already on the list, build a profile for them try: if l.user.id not in emails: emails[l.user.id] = build_profile(l.user) info = mc.listMemberInfo(id=list, email_address=l.user.email) emails[l.user.id]['GROUPINGS'] = info['merges']['GROUPINGS'] # remove group from list emails[l.user.id]['GROUPINGS'] = remove_group(l.group, emails[l.user.id]['GROUPINGS']) except: print "--ERROR" # ok, done. l.delete() if len(emails): result = mc.listBatchSubscribe(id=list, batch=emails.values(), double_optin=False, update_existing=True, replace_interests=True) print_result(result)