        # NOTE(review): this chunk starts mid-function — the enclosing def/loop
        # header is not visible here; indentation below is reconstructed and
        # should be verified against the full file.
        # Copy the aggregated per-champion stats onto the persisted record.
        found.damage = champion.damage
        found.objective_score = champion.objective_score
        found.tower_score = champion.tower_score
        found.won = champion.won
        # Fraction of analyzed matches in which this champion appeared.
        found.pick_rate = (champion.seen / float(match_num))
        found.num_seen = champion.seen
        db.session.add(found)
        db.session.commit()
        # NOTE(review): duplicate commit — the second db.session.commit() looks
        # redundant (or belongs after the loop); confirm intent.
        db.session.commit()


def get_match_count():
    # Count all Match rows currently stored; used as the denominator for
    # per-champion pick rates.
    LOGGING.push("Getting match count.")
    match_num = db.session.query(Match).count()
    LOGGING.push("*'" + str(match_num) + "'* matches found in the database.")
    return match_num


def analyze_bans():
    """Analyze banning patterns, looking at what popular bans, and when."""
    # Not implemented yet — placeholder for future ban analysis.
    pass


if __name__ == '__main__':
    LOGGING.push("Analyzing database.")
    analyze_champions()
    LOGGING.push("Database has been analyzed.")
            # NOTE(review): this chunk starts mid-method — the condition below is
            # the tail of an `if (...)` begun outside this view; indentation is
            # reconstructed and should be verified against the full file.
            and
            # TODO(Need to encode this into unicode.)
            # Skip overly long tweets (escaped length >= 1000 characters).
            len(str(status.text.encode("unicode_escape"))) < 1000
        ):
            LOGGING.push("*" + status.user.name + "*: " + LOGGING.clean(status.text))
            Tweet.store_tweet(status)
            self.num_tweets += 1
            # Emit a progress message every 100 stored tweets.
            if self.num_tweets % 100 == 0:
                LOGGING.push("*" + str(self.num_tweets) + "* tweets have been collected.")

    def on_error(self, status_code):
        # Log the Twitter API error code and return False so tweepy
        # disconnects the stream instead of retrying.
        # NOTE(review): push() is given two arguments here (comma after the
        # first string) while other call sites pass one — confirm intent.
        LOGGING.push("Received error with status code: #", str(status_code) + "#. Disconnecting from stream.")
        return False

    def on_timeout(self):
        # Returning True tells tweepy to keep the stream alive after a
        # server timeout.
        LOGGING.push("Server timed out.")
        return True


if __name__ == "__main__":
    # Attach the listener to the authenticated API and consume the public
    # sample stream indefinitely.
    stream_listener = TweetStreamListener()
    stream = tweepy.Stream(auth=API.auth, listener=stream_listener)
    LOGGING.push("Starting to collect the sample stream.")
    stream.sample()
        # NOTE(review): this chunk starts mid-statement — the assignment/call
        # wrapping this Cursor expression begins outside this view; indentation
        # is reconstructed and should be verified against the full file.
        # Page through up to 1000 search results for this category,
        # 100 per request.
        tweepy.Cursor(API.search, q=category, count=100).items(1000)
    )
    for status in cursor:
        # Store the tweet only if it is not already in the database and its
        # escaped text is short enough to persist.
        if (
            db.session.query(Tweet).filter_by(
                tweet_id=status.id
            ).count() == 0
            and
            # TODO(Need to encode this into unicode.)
            len(str(status.text.encode('unicode_escape'))) < 1000
        ):
            LOGGING.push(
                "*" + status.user.name + "*: " + LOGGING.clean(status.text)
            )
            Tweet.store_tweet(status)


def crawl_search():
    """Crawls the search."""
    # Run one search crawl per configured category.
    for category in CATEGORIES:
        crawl_category(category)
    # Stem out into related common words with searches?


if __name__ == "__main__":
    LOGGING.push("Starting to crawl the search.")
    crawl_search()
    LOGGING.push("Finished crawling the search.")
# algorithm for assigning tier lists. Look at popularity on twitter? for champion in champions: # True forces the methods to force updates champion.get_score(True) champion.get_counters(True) champion.get_assists(True) # TODO(Use adjustments attribute to assign miscellaneous overall stats) # ChampionDatas as verticies. Directed edges against champions to # depict wins against that champion. Champion w/ highest out # degree is the most likely to win in the meta. # Can start storing specified MMR for games and create a single # source shortest path to a desired MMR. # Adjust all champions scores to model a normal distribution. # Adjustment scores are used for this. def approximate_normal(): """Approximate the normal distribution with champion scores.""" pass if __name__ == "__main__": LOGGING.push("Starting adjustments.") make_adjustments() LOGGING.push("Adjustments have been completed.") LOGGING.push("Now starting to approximate normal.") approximate_normal() LOGGING.push("Finished approximating normal.")