def _fetch_tweets(twit, uid):
    """ return stored tweets for uid, fetching from the Twitter API and saving on a miss """
    tweets = Tweets.get_id(uid)
    if not tweets:
        tweets_ = twit.user_timeline(uid)
        tweets = Tweets(_id=uid, tweets=tweets_)
        tweets.save()
    return tweets
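
# Hypothetical usage sketch for _fetch_tweets (not part of the crawl jobs):
# the first call for a uid asks the API wrapper for the timeline and saves it
# as a Tweets document; later calls return the stored document without another
# API hit. The stub client below is an assumption made for illustration only.
class _StubTwitterApi(object):
    """ stand-in for the real API wrapper; counts user_timeline calls """
    def __init__(self):
        self.calls = 0

    def user_timeline(self, uid):
        self.calls += 1
        return []  # pretend the user has no recent tweets

def _example_fetch_tweets_is_cached(uid):
    twit = _StubTwitterApi()
    _fetch_tweets(twit, uid)  # cache miss: hits the (stub) API and saves
    _fetch_tweets(twit, uid)  # cache hit: read back from the database
    return twit.calls  # expected to be 1 if the save round-trips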
def geo_ats():
    """ fetch all at mentions from database """
    for tweets in Tweets.find({}, fields=['ats']):
        if tweets.ats:
            uid = tweets._id
            yield User.mod_id(uid), (uid, tweets.ats)
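
# Hypothetical consumer sketch (not a job defined in this repo): geo_ats()
# yields (bucket, (uid, ats)) pairs keyed by User.mod_id(uid), so downstream
# code can group all at-mentions that share a bucket. Only the standard
# library is assumed here.
def _example_group_geo_ats():
    from collections import defaultdict
    buckets = defaultdict(list)
    for bucket, (uid, ats) in geo_ats():
        buckets[bucket].append((uid, ats))
    return buckets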
def test_find_contacts_errors(self):
    self.FS["mloc_users.04"] = [dict(id=404)]
    self.FS["mloc_users.03"] = [dict(id=503)]
    with _patch_twitter():
        self.gob.run_job("find_contacts")
    for uid in (404, 503):
        missing = User.get_id(uid)
        self.assertEqual(missing.error_status, uid)
        self.assertEqual(missing.neighbors, None)
        self.assertEqual(missing.rfriends, None)
        self.assertEqual(Edges.get_id(uid), None)
        self.assertEqual(Tweets.get_id(uid), None)
def nebrs_d(user_d, mloc_blur):
    """ create dict with lots of information about a target user's located contacts """
    mb = MlocBlur(*mloc_blur)
    user = User(user_d)
    nebrs = User.find(User._id.is_in(user_d['nebrs']))
    tweets = Tweets.get_id(user_d['_id'], fields=['ats'])
    res = make_nebrs_d(user, nebrs, tweets.ats)
    res['mloc'] = user_d['mloc']
    res['gnp'] = _blur_gnp(mb, user_d)
    return [res]
def total_contacts(user_ds):
    """ count the total number of contacts (to include in the paper) """
    for user_d in itertools.islice(user_ds, 2600):
        user = User.get_id(user_d['id'])
        if not user:
            yield "no user"
        elif user.error_status:
            yield str(user.error_status)
        else:
            edges = Edges.get_id(user._id)
            tweets = Tweets.get_id(user._id)
            if not edges or not tweets:
                yield "no contacts"
            else:
                sets = _contact_sets(tweets, edges)
                yield [len(sets[k]) for k in User.NEBR_KEYS]
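
# Hypothetical summary sketch (not used elsewhere in the repo): total_contacts
# yields either an error label (a string) or a per-user list of contact-set
# sizes, one entry per key in User.NEBR_KEYS. A caller could tally both like
# this, using only the standard library.
def _example_summarize_total_contacts(user_ds):
    from collections import Counter
    errors = Counter()
    totals = Counter()
    for row in total_contacts(user_ds):
        if isinstance(row, str):
            errors[row] += 1
        else:
            for key, count in zip(User.NEBR_KEYS, row):
                totals[key] += count
    return errors, totals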