def identify(self, social_key, entity, request):
    # Record that *entity*'s social handle for *social_key* (e.g. 'twitter')
    # has been verified: log a history delta, compose an announcement tweet,
    # and return the entity data plus the tweet text as the JSON response.
    differences = {
        social_key + '__add': entity[social_key],
        'ts_match_' + social_key: int(time.time())
    }
    print 'valid:', social_key, entity[social_key]
    # the pending 'match_<key>' candidate attribute is cleared once identified
    if entity['match_' + social_key]:
        del entity['match_' + social_key]
        entity.partial_save()
    EntityHistory().delta(entity, differences)
    league = Entity().get_item(league=entity[keys.entity_league],
                               profile=Entity().league_profile(
                                   entity[keys.entity_league]))
    curator = User().get_curator(entity[keys.entity_league])
    # dispatch to tweets.id_<social_key>(...) to build the announcement text
    tweet_txt = getattr(tweets, 'id_' + social_key)(entity, curator, league)
    try:
        # best-effort log: tweet text may contain characters the console
        # encoding cannot print
        print 'tweet message:', tweet_txt
    except:
        pass
    tweet_message = {}
    tweet_message.update(entity._data)
    tweet_message[twitter_keys.message_tweet] = tweet_txt
    request.write(json.dumps(tweet_message, cls=fixed.SetEncoder))
    request.finish()
    return server.NOT_DONE_YET
def do_recover(recover):
    # Recover a renamed Twitter handle: HEAD the old status URL and, if
    # Twitter 301-redirects to the new handle, persist it on the entity
    # (after validation).
    url = 'https://twitter.com/statuses/' + recover[Tweet.tweet_id]
    print 'url:', url
    # curl-like headers; a HEAD avoids downloading the page body
    check = requests.head(url, headers={
        'User-Agent': 'curl/7.35.0',
        'Accept': '*/*'
    })
    print 'twitter plus response:', check.status_code, 'url:', url
    if check.status_code == 301:
        redirect = check.headers['Location']
        print 'redirect:', redirect
        # extract the screen name from the redirect URL
        new_twitter = twitter_keys.gettwitter(redirect)
        entity = Entity().get_item(league=recover[keys.entity_league],
                                   profile=recover[keys.entity_profile])
        print 'new twitter:', new_twitter, 'league:', recover[
            keys.entity_league], 'profile:', recover[
                keys.entity_profile], 'existing twitter:', entity[
                    keys.entity_twitter]
        if entity[keys.entity_twitter] != new_twitter:
            entity[keys.entity_twitter] = new_twitter
            # only save if the new handle passes validation
            if twitter_keys.validate_twitter(entity):
                entity.partial_save()
                print 'save new twitter!'
        else:
            print 'twitter already updated'
def league_initialize(self):
    # Load this worker's league entity (self.role[1] is the league name)
    # into self.league; failures are logged, not raised.
    try:
        self.league = Entity().get_item(league=self.role[1],
                                        profile='league:' + self.role[1])
        print 'league initialize complete:', self.league[
            keys.entity_league]
    except Exception as e:
        print 'league initialize exception:', e
def league_scouting_report(league_name):
    """Build a one-line unicode summary of a league's team/player/tweeter
    counts, prefixed with the curator's handle and league hashtag."""
    from amazon.dynamo import Entity, User
    team_count = '%s %s' % (Entity().query_count(
        league__eq=league_name, profile__beginswith='team:'), 'Teams')
    player_count = '%s %s' % (Entity().query_count(
        league__eq=league_name, profile__beginswith='http:'), 'Players')
    tweeter_count = '%s %s' % (Entity().query_count(
        index=Entity.index_twitter, league__eq=league_name), 'Tweeter(s)')
    curator_tag = User().get_curator(league_name)[
        user_keys.user_role] + '/#' + league_name
    return u' '.join([curator_tag, 'curates', league_name, 'with',
                      team_count, player_count, tweeter_count])
def fc_standings(league_name, urls, teams, sub = {}): print 'league:', league_name, 'team length:', len(teams) for url in urls: fc_standings_html = yield cv.goto_url(url).addCallback(cv.to_html) print 'fc_standings html length:', len(fc_standings_html), url, cv.page().url().toString() team_tds = fc_standings_html.cssselect('tr.standings-row') print 'team tds:', len(team_tds) for i, team_td in enumerate(team_tds): rank = i + 1 try: tn = parse.csstext(team_td.cssselect('span.team-names')[0]) if tn in sub: #print 'sub:', tn, sub[tn] tn = sub[tn] wdl = team_td.cssselect('td[style="white-space:no-wrap;"]') wins = parse.csstext(wdl[0]) ties = parse.csstext(wdl[1]) losses = parse.csstext(wdl[2]) record = wins + '-' + losses + '-' + ties print 'team:', tn, 'record:', record found = False for t in Entity().query_2(league__eq=league_name, profile__eq='team:' + tn): found = True t[keys.entity_record] = record t[keys.entity_rank] = rank print tn, rank, record t.partial_save() if not found: for t2 in Entity().query_2(league__eq=league_name, profile__beginswith='team:' + tn): found = True t2[keys.entity_record] = record t2[keys.entity_rank] = rank print tn, rank, record t2.partial_save() if not found: try: potential_teams = [t3 for t3 in teams if tn in t3[keys.entity_profile]] if len(potential_teams) == 1: found = True potential_teams[0] potential_teams[0][keys.entity_record] = record potential_teams[0][keys.entity_rank] = rank print tn, rank, record potential_teams[0].partial_save() except: pass if not found: print ' missing:', tn, rank except Exception as e: print 'fc exception:', e #<span class="team-names">Barcelona</span> '''
def league_lists(league_name, do_add=True):
    # Maintain one Twitter list per team in *league_name* on the league's
    # own account: create missing lists, add tracked members in batches of
    # 100 (the Twitter API cap), then remove stale "excess" members.
    print 'league list:', league_name
    league_user = User().get_by_role(league_name, keys.entity_twitter)
    league_user_app = auth.user_app(league_user)
    # authenticate with the first registered twitter app for this user
    oauth = auth.get_oauth(
        league_user, league_user_app,
        league_user_app[user_keys.user_twitter_apps].keys()[0])
    lists = restful.get_lists(league_user, league_user_app, oauth)
    print 'lists:', len(lists), [_l['name'] for _l in lists]
    league_user[user_keys.user_home_lists] = len(lists)
    league_user.save()
    for team in Entity().query_2(league__eq=league_name,
                                 profile__beginswith='team:', reverse=True):
        team_name = make_list_name(team[keys.entity_profile].split(':')[1])
        if not get_list_id(team_name, lists):
            print 'missing list:', team_name
            list_description = u'Tracking {0} {1} from {2}'.format(
                league_user[keys.entity_name],
                league_user[keys.entity_lingo]['players'], team_name)
            restful.create_list(league_user, league_user_app, team_name,
                                list_description, oauth)
        else:
            print league_name, 'has:', team_name
            list_id = get_list_id(team_name, lists)
            existing_members = restful.list_members(league_user,
                                                    league_user_app, list_id,
                                                    oauth)
            print team_name, 'existing members:', len(existing_members)
            if do_add:
                add_members = []
                for e in Entity().query_2(
                        index=Entity.index_team_profile,
                        team__eq=team[keys.entity_profile].split(':', 1)[1],
                        query_filter={'twitter__null': False}):
                    # presumably queues e in add_members unless already an
                    # existing member — TODO confirm against check_twitter
                    check_twitter(e, add_members, existing_members)
                    if len(add_members) == 100:
                        if not do_add_members(league_user, league_user_app,
                                              list_id, add_members, oauth):
                            return
                        add_members = []
                if len(add_members) > 0:
                    if not do_add_members(league_user, league_user_app,
                                          list_id, add_members, oauth):
                        return
            # whatever is left in existing_members was not re-confirmed above
            print 'excess members:', len(
                existing_members), existing_members
            remove_members = []
            for em in existing_members:
                remove_members.append(em)
                if len(remove_members) == 100:
                    do_remove_members(league_user, league_user_app, list_id,
                                      remove_members, oauth)
                    remove_members = []
            if len(remove_members) > 0:
                do_remove_members(league_user,
                                  league_user_app, list_id, remove_members,
                                  oauth)
def render_GET(self, request): shared.SharedPath().response_headers(request, 'application/json') kwargs = { 'league': request.postpath[0], 'profile': Entity().league_profile(request.postpath[0]) } print 'get league:', kwargs threads.deferToThread( Entity().get_item, **kwargs).addCallback(lambda ans: json.dumps( shared.treat(ans), cls=fixed.SetEncoder)).addCallback( request.write).addCallback(lambda ign: request.finish()) return server.NOT_DONE_YET
def find_by_profile(self, request, profile):
    # Resolve *profile* to an entity depending on the request path depth:
    # 0 segments -> site-scoped lookup, 1 segment -> league-scoped lookup,
    # 2 segments -> deliberately returns None.
    if len(request.postpath) == 0 or request.postpath[0] == '':
        # site index lookup; [0] raises IndexError if nothing matches
        return [
            e for e in Entity().query_2(
                index=Entity.index_site_profile,
                site__eq=shared.SharedPath().path_site(request),
                profile__eq=profile)
        ][0]
    elif len(request.postpath) == 1:
        return Entity().get_item(
            league=self.path_league(request)[keys.entity_league],
            profile=profile)
    elif len(request.postpath) == 2:
        # NOTE(review): explicit no-result for two-segment paths — confirm
        # callers expect None here
        return
def buildandsave(site, exit_on_save=True):
    # Render a curator's Twitter background image: resolve league and player
    # avatar redirects, feed them into a QML scene, and screenshot it.
    # Generator with yields — presumably decorated with inlineCallbacks at
    # the definition site; TODO confirm.
    agent = Agent(reactor)
    curator = User().get_by_role(site, keys.entity_twitter)
    leagues = []
    deferreds_league = []
    for l in curator[user_keys.user_site_leagues]:
        league = Entity().get_item(league=l, profile='league:' + l)
        # HEAD resolves the avatar redirect without downloading the image
        d = agent.request(
            "HEAD",
            str('http://' + league[keys.entity_site] + '/tw/' +
                league[keys.entity_twitter_id] + '/avatar_large.png'))
        d.addCallback(add_redirect, league, leagues, 'large')
        deferreds_league.append(d)
    yield defer.DeferredList(deferreds_league)
    print 'leagues length:', len(leagues)
    players = []
    deferreds_small = []
    for p in Entity().query_2(index=Entity.index_site_profile,
                              site__eq=curator[user_keys.user_role],
                              query_filter={'twitter__null': False},
                              limit=200):
        d = agent.request(
            "HEAD",
            str('http://' + p[keys.entity_site] + '/tw/' +
                p[keys.entity_twitter_id] + '/avatar_small.png'))
        d.addCallback(add_redirect, p, players, 'small')
        deferreds_small.append(d)
    yield defer.DeferredList(deferreds_small)
    print 'players length:', len(players)
    view = QQuickView()
    view.setSource(QUrl('qml/render/curator_twitter_bg.qml'))
    view.rootObject().setProperty('bgcolor', 'black')
    view.setWidth(1500)
    view.setHeight(500)
    view.show()
    view.rootObject().setProperty('curator', curator._data)
    view.rootObject().setProperty('leagues', leagues)
    view.rootObject().setProperty('players', players)
    # give the QML scene 30s to load remote images before the screenshot
    yield task.deferLater(reactor, 30, screenshot, view, site, curator)
    if exit_on_save:
        print 'exit on save'
        reactor.callLater(0, reactor.stop)
    else:
        print 'done'
def match_qualify(self, match_stats, qualified):
    # Decide whether *match_stats* qualifies for this league and, if so,
    # append it to *qualified*. Always returns the (possibly grown) list.
    # Skip: no twitter handle, suspended account, or already qualified.
    if keys.entity_twitter not in match_stats or self.page().url(
    ).toString() == 'https://twitter.com/account/suspended' or match_stats[
            keys.entity_twitter] in [
                ems[keys.entity_twitter] for ems in qualified
            ]:
        return qualified
    # fik = "followers I know" count, 0 when absent
    fik = 0 if twitter_keys.match_followers_you_know not in match_stats else match_stats[
        twitter_keys.match_followers_you_know]
    if fik >= self.get_qualifying() or match_stats[
            twitter_keys.match_blocked] or match_stats[
                twitter_keys.match_protected]:
        # reject if the handle is already tracked in another (non-celebrity)
        # league
        for et in Entity().query_2(
                index=Entity.index_twitter_league,
                twitter__eq=match_stats[keys.entity_twitter]):
            print 'already in league:', et[keys.entity_league]
            if et[keys.entity_league] != 'celebrity' and self.league[
                    keys.entity_league] != 'celebrity':
                return qualified
        print 'qualified:', match_stats[
            keys.entity_twitter], 'match that i know:', fik
        qualified.append(match_stats)
    elif match_stats[twitter_keys.match_protected]:
        # NOTE(review): unreachable — the branch above already ORs in
        # match_protected, so a protected account always takes that path.
        # Confirm whether protected accounts were meant to be excluded.
        print '  protected:', 'https://twitter.com/' + match_stats[
            keys.entity_twitter]
    return qualified
def match_clean(self): now = int(time.time()) print 'match loop qualifying twitter:', self.get_qualifying() existing = 0 for clean in Entity().query_2( league__eq=self.role[1], query_filter={'match_twitter__null': False}): if isinstance(clean[keys.entity_match_twitter], Decimal): if now - clean[keys.entity_match_twitter] > 60 * 60 * 24 * 7: del clean[keys.entity_match_twitter] try: print 'clean:', clean[keys.entity_profile] except: pass clean.partial_save() else: dirty = False for m in clean[keys.entity_match_twitter]: if time_keys.ts_match_twitter not in m or now - m[ time_keys.ts_match_twitter] > 60 * 60 * 24 * 14: clean[keys.entity_match_twitter].remove(m) dirty = True if dirty: clean.partial_save() else: existing += 1 print 'existing matches:', existing
def fbs_standings():
    # Scrape ESPN college-football standings plus the AP Top 25 and persist
    # record/rank onto matching d1tweets.com team entities.
    teams = [t for t in Entity().query_2(index=Entity.index_site_profile,
                                         site__eq='d1tweets.com',
                                         profile__beginswith='team:')]
    standings_html = yield cv.goto_url('http://www.espn.com/college-football/standings').addCallback(cv.to_html)
    for td in standings_html.cssselect('tr.standings-row td.team'):
        # the record lives four cells to the right of the team cell
        record = parse.csstext(td.getnext().getnext().getnext().getnext())
        fb_team_href = td.cssselect('a')[0].attrib['href'].rsplit('/', 1)[1]
        try:
            team = fbs_get_team(teams, fb_team_href)
            team[keys.entity_record] = record
            print team[keys.entity_profile].split(':', 1)[1], record
            team.partial_save()
        except:
            # best-effort: unmatched href slug is logged and skipped
            print 'no such luck:', fb_team_href
    rankings_html = yield cv.goto_url('http://www.espn.com/college-football/rankings').addCallback(cv.to_html)
    try:
        for h2 in rankings_html.cssselect('h2.table-caption'):
            if parse.csstext(h2) == 'AP Top 25':
                for r in h2.getparent().cssselect('table')[0].cssselect('span.number'):
                    fb_team_href = r.getparent().getnext().cssselect('a.logo')[0].attrib['href'].rsplit('/', 1)[1]
                    team = fbs_get_team(teams, fb_team_href)
                    team[keys.entity_rank] = parse.csstext(r)
                    team.partial_save()
    except Exception as e:
        print 'e:', e
def bio_hash(entity, league):
    """Collect hashtags from the player's, team's and league's Twitter bios.

    Returns a deduplicated list of '#tag' strings; purely numeric tags are
    skipped. Each of the three lookups is best-effort — a missing profile,
    team or description simply contributes nothing.
    """
    from amazon.dynamo import Entity, ProfileTwitter

    def _collect_hashtags(description, tweet):
        # Append '#tag' for every non-numeric, not-yet-seen hashtag found
        # in *description* (the original repeated this loop three times).
        for ht in re.findall(r"#(\w+)", description):
            add_hash = '#' + ht
            if not ht.isdigit() and add_hash not in tweet:
                tweet.append(add_hash)

    tweet = []
    try:
        # the player's own bio
        es = ProfileTwitter().profile_recent(entity[keys.entity_twitter_id])
        _collect_hashtags(es[ProfileTwitter.description], tweet)
    except Exception:
        pass
    try:
        # the player's team bio
        te = Entity().get_item(league=league[keys.entity_league],
                               profile='team:' + entity[keys.entity_team])
        ts = ProfileTwitter().profile_recent(te[keys.entity_twitter_id])
        _collect_hashtags(ts[ProfileTwitter.description], tweet)
    except Exception:
        pass
    try:
        # the league bio
        ls = ProfileTwitter().profile_recent(league[keys.entity_twitter_id])
        _collect_hashtags(ls[ProfileTwitter.description], tweet)
    except Exception:
        pass
    return tweet
def publish_loop(): for site in User().get_sites(): b = s3.Bucket(site[user_keys.user_role]) for league_name in [ l for l in site[user_keys.user_site_leagues] if not leagues or l in leagues ]: league = Entity().get_league(league_name) prefix_1 = (league[keys.entity_league] if league[keys.entity_emblem] is None else league[keys.entity_emblem]) + '/logo/' prefix_2 = (league[keys.entity_league] if league[keys.entity_emblem] is None else league[keys.entity_emblem]) + '/logo_standard/' for prefix in [prefix_1, prefix_2]: for logo in os.listdir('/home/ubuntu/' + site[user_keys.user_role] + '/' + prefix): fp = '/home/ubuntu/' + site[ user_keys.user_role] + '/' + prefix + logo objs = list(b.objects.filter(Prefix=prefix + logo)) print b.name, logo if len(objs) > 0 and len(sys.argv) < 2: print 'exists:', logo else: print 'missing or overwrite:', logo data = open(fp, 'rb') b.put_object(ACL='public-read', Key=prefix + logo, Body=data, ContentType='image/svg+xml')
def nfl_standings():
    # Scrape ESPN NFL standings and save W-L-T records onto nfl team
    # entities.
    nfl_standings = yield cv.goto_url('http://www.espn.com/nfl/standings').addCallback(cv.to_html)
    for span in nfl_standings.cssselect('span span.team-names'):
        # walk up to the team cell, then read the next three sibling cells
        td = span.getparent().getparent().getparent()
        wins = parse.csstext(td.getnext())
        losses = parse.csstext(td.getnext().getnext())
        ties = parse.csstext(td.getnext().getnext().getnext())
        tn = parse.csstext(span)
        try:
            record = wins + '-' + losses + '-' + ties
            t = Entity().get_item(league='nfl', profile='team:' + tn)
            t[keys.entity_record] = record
            print tn, record
            t.partial_save()
        except Exception as e:
            print e
def check_avatar(self, incoming, i):
    """Ensure *incoming*'s Instagram avatar exists in S3.

    If the digest-derived key is missing: download the avatar, convert
    JPEG -> PNG via ImageMagick, round the corners, and upload it along
    with the entity data.
    """
    b = s3.bucket_straight('socialcss.com')
    filename = 'insta/' + incoming[
        keys.entity_instagram_id] + '/' + fixed.digest(
            i[instagram_keys.instagram_avi]) + '.png'
    if not s3.check_key(b, filename):
        # keep the original file extension from the avatar URL
        local_large_avi_path = '/tmp/instagram/avi_' + incoming[
            keys.entity_instagram_id] + i[instagram_keys.instagram_avi][
                i[instagram_keys.instagram_avi].rindex('.'):]
        fixed.filesubpath(local_large_avi_path)
        # was open(..., 'w'): image bytes must be written in binary mode
        with open(local_large_avi_path, 'wb') as large_file:
            response = urllib2.urlopen(
                i[instagram_keys.instagram_avi]).read()
            large_file.write(response)
        if local_large_avi_path[-3:].lower(
        ) == 'jpg' or local_large_avi_path[-4:].lower() == 'jpeg':
            new_local_large_avi_path = '/tmp/large/png/' + incoming[
                keys.entity_instagram_id] + '.png'
            fixed.filesubpath(new_local_large_avi_path)
            # ImageMagick converts by extension
            args = [
                'convert', local_large_avi_path, new_local_large_avi_path
            ]
            subprocess.check_call(args)
            local_large_avi_path = new_local_large_avi_path
        misc.round_corners(local_large_avi_path, 'insta')
        e = Entity().get_item(league=incoming[keys.entity_league],
                              profile=incoming[keys.entity_profile])
        s3.save_insta_avi(i[instagram_keys.instagram_avi],
                          local_large_avi_path, e._data)
def check_entities(self, tweet):
    # Inspect the tweet's entities: for every distinct non-self mention of
    # an account we track, append its entity data (tagged with the
    # mention's user id) under Tweet.known_mentions; then log any URLs.
    if twitter_keys.entities in tweet:
        if twitter_keys.user_mentions in tweet[twitter_keys.entities]:
            mentions_checked = set([])
            for mention in tweet[twitter_keys.entities][
                    twitter_keys.user_mentions]:
                # skip self-mentions
                if mention[twitter_keys.screen_name] != tweet[
                        twitter_keys.user][twitter_keys.screen_name]:
                    # look each distinct screen name up only once
                    if mention[twitter_keys.
                               screen_name] not in mentions_checked:
                        print 'check:', mention[
                            twitter_keys.screen_name], mention
                        mentions_checked.add(
                            mention[twitter_keys.screen_name])
                        for e3 in Entity().query_2(
                                index=Entity.index_twitter_league,
                                twitter__eq=mention[
                                    twitter_keys.screen_name]):
                            print 'know mention:', e3[
                                keys.entity_twitter], e3[
                                    keys.entity_league]
                            if Tweet.known_mentions not in tweet:
                                tweet[Tweet.known_mentions] = []
                            from_league = {
                                Tweet.tweet_user_id:
                                mention[twitter_keys.id_str]
                            }
                            from_league.update(e3._data)
                            self.clean_up(from_league)
                            tweet[Tweet.known_mentions].append(from_league)
    if self.get_entities_urls(tweet):
        print 'urls:', self.get_entities_urls(tweet)
def lookup(self, oldid):
    """Return the cached 'mlb' entity whose profile URL ends with *oldid*,
    or None. The entity cache is filled lazily on first call."""
    if not self.old_entity:
        self.old_entity.extend(
            Entity().query_2(league__eq='mlb',
                             profile__beginswith='http://'))
    return next((cached for cached in self.old_entity
                 if cached[keys.entity_profile].endswith(oldid)), None)
def derived_entities(self, players):
    # Reconcile stored team entities against the team names derived from
    # *players*: optionally create teams that are missing and delete teams
    # no longer referenced by any player. Prints a summary of all counts.
    derived_team_names = []
    missing_team = []
    found_team = []
    lost_team = []
    for p in players:
        if keys.entity_team in p and p[
                keys.entity_team] not in derived_team_names:
            derived_team_names.append(p[keys.entity_team])
    for team_name in derived_team_names:
        try:
            team_entity = Entity().get_item(
                league=self.get_league_name(),
                profile=Entity().team_profile(team_name))
            print 'found team:', fixed.team_name(team_entity)
            found_team.append(team_entity)
            #players.append(team_entity)
        except Exception as e:
            # get_item raised: the team entity does not exist yet
            missing_team.append(team_name)
            print 'derived missing team:', team_name, self.get_league_name(
            ), e
            if self.create_missing_teams:
                Entity().put_item(
                    data={
                        keys.entity_league:
                        self.get_league_name(),
                        keys.entity_profile:
                        Entity().team_profile(team_name),
                        keys.entity_site:
                        User().get_curator(self.get_league_name())[
                            user_keys.user_role]
                    })
    # teams present in storage but not derived from any player are "lost"
    for e in Entity().query_2(league__eq=self.get_league_name(),
                              profile__beginswith='team:'):
        if e[keys.entity_profile] not in [
                ft[keys.entity_profile] for ft in found_team
        ]:
            print 'lost team:', fixed.team_name(e)
            lost_team.append(e)
            if self.remove_lost_teams:
                e.delete()
    print 'derived_team_names:', len(
        derived_team_names), 'missing_team:', len(
            missing_team), 'found team:', len(
                found_team), 'lost team:', len(lost_team)
def match_loop(self): i = 1 for ltp in Entity().query_2(league__eq=self.role[1], query_filter=self.qf): print 'consider:', i, 'of:', self.consider, ltp[ keys.entity_league], ltp[keys.entity_profile] yield self.match_entity(ltp) i += 1 self.match_done()
def click_match():
    # UI click handler (closure): resolve the selected item to its stored
    # entity and fire the deferred with it; False signals "not found".
    # NOTE(review): hide_buttons, t and d come from the enclosing scope.
    hide_buttons()
    try:
        entity = Entity().get_item(league=t[keys.entity_league],
                                   profile=t[keys.entity_profile])
        d.callback(entity)
    except ItemNotFound:
        print 'cannot find history'
        d.callback(False)
def match(self):
    # Kick off a full match pass: refresh the curator's blocked-handle
    # list, prune stale match data, count candidates, then run the async
    # match loop. Returns the loop's deferred with error handling attached.
    self.blocked = twitter_keys.get_blocked(
        self.curator[user_keys.user_role], keys.entity_twitter)
    print 'blocked twitter:', self.blocked
    self.match_clean()
    self.consider = Entity().query_count(league__eq=self.role[1],
                                         query_filter=self.qf)
    print 'match consider:', self.consider
    d = self.match_loop()
    d.addErrback(self.error_view)
    return d
def stalk_analysis(league_name): mutuals = [] extras = [] for mf in Entity().query_2(league__eq=league_name): if 'twitter' in mf and 'ts_followers_' + league_name in mf: try: ts_add = None if mf[time_keys.ts_scout]: ts_add = mf[time_keys.ts_scout] slt = ProfileTwitter().profile_last(mf[keys.entity_twitter_id], None, ts_add) if slt is not None: print 'append:', mf[keys.entity_twitter], 'since:', fixed.lingo_since_date(ts_add) mutuals.append( (mf, slt) ) else: print 'no last stats:', mf[keys.entity_twitter], mf[keys.entity_twitter_id] except Exception as e: print 'missing:', e, 'https://twitter.com/' + mf[keys.entity_twitter], fixed.lingo_since(mf, twitter_keys.league_ts_followers(league_name)) else: extras.append(mf._data) print 'extras length:', len(extras) for mutual_seq in mutuals: try: mutual = mutual_seq[0] mutual_slt = mutual_seq[1] print 'mutual:', mutual[keys.entity_twitter] tf = set([]) for other_seq in [others for others in mutuals if others[0] != mutual and twitter_keys.league_mutual(league_name) in others[1]]: if mutual[keys.entity_twitter_id] in other_seq[1][twitter_keys.league_mutual(league_name)]: tf.add(other_seq[0][keys.entity_twitter_id]) if len(tf) > 0: print mutual[keys.entity_twitter], 'follows:', len(tf), 'following:', 0 if twitter_keys.league_mutual(league_name) not in mutual_slt else len(mutual_slt[twitter_keys.league_mutual(league_name)]) mutual_slt[twitter_keys.league_follows(league_name)] = tf mutual_slt.partial_save() else: print 'not following anyone:', mutual[keys.entity_twitter], mutual[keys.entity_twitter_id] except Exception as e: print 'mutual exception:', e publish = [] curator = User().get_curator(league_name) for mutual_seq_2 in mutuals: try: p = {} mutual = mutual_seq_2[0] p.update(mutual._data) mutual_slt = mutual_seq_2[1] p.update(mutual_slt._data) publish.append(p) except: print 'mutual exception:', e b = s3.bucket_straight(curator[user_keys.user_role]) filename = league_name + '/db/bible.json' meta = 
shared.entity_filter(curator._data) output = shared.dump(publish + extras) s3.save_s3(b, filename, output , None, content_type='application/json', acl='public-read', meta=meta, encode='gzip')
def gather_active_roster(self, h, team): doc = html.document_fromstring(h) #/html/body/div[1]/div[3]/div[1]/section/div/section[1]/table team[keys.entity_team] = doc.cssselect( 'meta[property="og:site_name"]')[0].attrib['content'] for t in doc.xpath('//table[@class="data roster_table"][@summary]'): for pt in t.xpath('preceding-sibling::h4'): position = pt.text if pt.text[-1] == 's': position = pt.text[:-1] for player in t.xpath('tbody/tr[position() > 0]'): #print etree.tostring(player) try: player_dict = {} player_dict[keys.entity_position] = position player_dict[keys.entity_profile] = fixed.clean_url( 'http://m.mlb.com' + player[2].xpath('a/@href')[0]) if player[0].text: player_dict[keys.entity_jersey] = player[0].text if player_dict[keys.entity_jersey] == '42': try: e = Entity().get_item( league='mlb', profile=player_dict[ keys.entity_profile]) player_dict[keys.entity_jersey] = e[ keys.entity_jersey] except: pass player_dict[keys.entity_name] = player[2].xpath( 'a[starts-with(@href, "/player/")]')[0].text try: player_dict[keys.entity_status] = etree.tostring( player[2], method="text").strip().split('<br>')[1] print 'has status:', player_dict[ keys.entity_status] except: pass player_dict[keys.entity_height] = player[4].text player_dict[keys.entity_weight] = player[5].text player_dict[keys.entity_born] = player[6].text bt = player[3].text player_dict['bats'] = bt.split("/")[0] player_dict['throws'] = bt.split("/")[1] #print player_dict team['players'].append(player_dict) except Exception as e: print 'player exception:', e print 'team:', team['team'], 'players length:', len(team['players']) return team
def name_collision(self, league_name, entity_profile, entity_name): name_collisions = [] for np in Entity().query_2(league__eq=league_name, query_filter={ 'name__eq': entity_name, 'profile__ne': entity_profile }, conditional_operator='AND'): print 'found name collision:', np._data[ keys.entity_name], entity_profile name_collisions.append(np) return name_collisions
def mlb_standings():
    # Scrape mlb.com standings, map the team cell's CSS class to a team
    # abbreviation, and save W-L records onto mlb team entities.
    from league.scrape.mlb import MLB
    mlb_abbrv = MLB().abbr
    yield cv.goto_url('http://www.mlb.com/mlb/standings')
    # the standings table is populated client-side; wait 2s before reading
    yield task.deferLater(reactor, 2, defer.succeed, True)
    html = yield cv.to_html()
    for td in html.cssselect('td.dg-team_full'):
        a = td.cssselect('a')[0]
        wins = parse.csstext(td.getnext())
        losses = parse.csstext(td.getnext().getnext())
        record = wins + '-' + losses
        try:
            attrb = a.attrib['class']
            # mlb_sub remaps legacy/renamed CSS classes — module level
            if attrb in mlb_sub:
                attrb = mlb_sub[attrb]
            t = Entity().get_item(league='mlb',
                                  profile = 'team:' + mlb_abbrv[attrb])
            t[keys.entity_record] = record
            print t[keys.entity_profile].split(':', 1)[1], record, attrb
            t.partial_save()
        except Exception as e:
            print 'mlb exception:', e
def get_retweet(self, tweet):
    # If this tweet retweets an account we track, return that account's
    # entity (first index hit); otherwise fall through to None.
    if twitter_keys.retweeted_status in tweet:
        print 'retweet_status:', 'https://twitter.com/' + tweet[
            twitter_keys.retweeted_status][twitter_keys.user][
                twitter_keys.screen_name] + '/status/' + str(tweet[
                    twitter_keys.retweeted_status][twitter_keys.id_str])
        for e2 in Entity().query_2(
                index=Entity.index_twitter_league,
                twitter__eq=tweet[twitter_keys.retweeted_status][
                    twitter_keys.user][twitter_keys.screen_name]):
            print 'know retweet:', e2[keys.entity_twitter], e2[
                keys.entity_league]
            # first match wins
            return e2
def curator_lists(curator_name, league_name, do_add=True):
    # Maintain the curator's Twitter list for *league_name*: create it if
    # missing; otherwise add tracked members (when do_add) and remove the
    # leftover "excess" members, both in batches of 100 (Twitter API cap).
    print 'curator list:', curator_name, 'league:', league_name
    curator_user = User().get_by_role(curator_name, keys.entity_twitter)
    # authenticate with the curator's first registered twitter app
    oauth = auth.get_oauth(curator_user, curator_user,
                           curator_user[user_keys.user_twitter_apps].keys()[0])
    lists = restful.get_lists(curator_user, curator_user, oauth)
    curator_user[user_keys.user_home_lists] = len(lists)
    curator_user.save()
    u = User().get_by_role(league_name, keys.entity_twitter)
    list_name = make_list_name(u[keys.entity_name].replace(' ', '-').replace(
        '/', '_'))
    if not get_list_id(list_name, lists):
        print 'missing list:', list_name
        restful.create_list(curator_user, curator_user, list_name,
                            u[keys.entity_description], oauth)
    else:
        print curator_name, league_name, 'has:', list_name
        list_id = get_list_id(list_name, lists)
        existing_members = restful.list_members(curator_user, curator_user,
                                                list_id, oauth)
        print 'existing members:', len(existing_members)
        if do_add:
            add_members = []
            for e in Entity().query_2(league__eq=league_name,
                                      query_filter={'twitter__null': False}):
                # presumably queues e in add_members unless already an
                # existing member — TODO confirm against check_twitter
                check_twitter(e, add_members, existing_members)
                if len(add_members) == 100:
                    if not do_add_members(curator_user, curator_user, list_id,
                                          add_members, oauth):
                        return
                    add_members = []
            if len(add_members) > 0:
                if not do_add_members(curator_user, curator_user, list_id,
                                      add_members, oauth):
                    return
        # whatever remains in existing_members was not re-confirmed above
        print 'excess members:', len(existing_members), existing_members
        remove_members = []
        for em in existing_members:
            remove_members.append(em)
            if len(remove_members) == 100:
                do_remove_members(curator_user, curator_user, list_id,
                                  remove_members, oauth)
                remove_members = []
        if len(remove_members) > 0:
            do_remove_members(curator_user, curator_user, list_id,
                              remove_members, oauth)
def get_conversation(self, tweet): if twitter_keys.in_reply_to_screen_name in tweet: try: conversation_entity = [ e for e in Entity().query_2( index=Entity.index_twitter_league, twitter__eq=tweet[ twitter_keys.in_reply_to_screen_name], league__eq=self.league) ][0] print 'conversion:', tweet[ twitter_keys. in_reply_to_screen_name] #, tweet[twitter_keys.in_reply_to_status_id] return conversation_entity except: pass
def nhl_standings():
    # Scrape nhl.com standings and save W-L-OT records onto nhl team
    # entities (prefix-matched by team name).
    nhl_standings = yield cv.goto_url('https://www.nhl.com/standings').addCallback(cv.to_html)
    for span in nhl_standings.cssselect('a span.team--name'):
        try:
            tn = parse.csstext(span)
            # walk up to the team cell, then read the following stat cells
            td = span.getparent().getparent().getparent()
            wins = parse.csstext(td.getnext().getnext())
            losses = parse.csstext(td.getnext().getnext().getnext())
            ot = parse.csstext(td.getnext().getnext().getnext().getnext())
            record = wins + '-' + losses + '-' + ot
            for t in Entity().query_2(league__eq='nhl',
                                      profile__beginswith='team:' + tn):
                t[keys.entity_record] = record
                print tn, record
                t.partial_save()
        except Exception as e:
            print e