def analyze_topics_globally():
    """
    Aggregates per-topic data across all wikis into the global topics collection.
    """
    print "Analyzing Topics..."
    collection = solr.all_topics_collection()
    se = SearchOptions()
    se.commonparams.q('*:*')
    futures = group(aggregate_global_topic.s(topic)
                    for topic, _ in solr.iterate_per_facetfield_value(collection, se, 'topic_s'))()
    while not futures.ready():
        print "Progress: (%d/%d)" % (futures.completed_count(), len(futures.results))
        sleep(2)
    collection.add(get_with_backoff(futures, []))
    collection.commit()
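
# Illustrative sketch only: `get_with_backoff` is imported from elsewhere in
# this package and is not defined here. It is assumed to behave roughly like
# the hypothetical helper below: fetch a celery group result, retrying with
# exponential backoff, and fall back to a default value if the backend keeps
# failing. The retry count and delays are assumptions, not the real policy.
def _get_with_backoff_sketch(async_result, default, retries=5):
    from time import sleep
    for attempt in range(retries):
        try:
            return async_result.get()
        except Exception:
            sleep(2 ** attempt)  # back off 1s, 2s, 4s, ... between attempts
    return default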
def get_title_top_authors(wiki_id, api_url, all_titles, all_revisions):
    """
    Creates a dictionary mapping each title to its top authors,
    with author contributions scaled to [0, 1].

    :param wiki_id: the ID of the wiki
    :type wiki_id: int
    :param api_url: the API URL of the wiki
    :type api_url: str
    :param all_titles: a list of all title objects
    :type all_titles: list
    :param all_revisions: a dict keying titles to revisions
    :type all_revisions: dict

    :return: a dict keying title to top authors, or None on failure
    :rtype: dict
    """
    print "Getting contributing authors for titles"
    futures = group(get_contributing_authors.s(wiki_id, api_url, title_obj, all_revisions[title_obj[u'title']])
                    for title_obj in all_titles if title_obj[u'title'] in all_revisions)()
    future_len = len(futures.results)
    cc = futures.completed_count()
    while not futures.ready():
        new_cc = futures.completed_count()
        if new_cc > cc:
            print "%d/%d" % (new_cc, future_len)
            cc = new_cc
        time.sleep(1)
    title_to_authors = get_with_backoff(futures, [])
    if not title_to_authors:
        print "Failed to get title to authors. Connection failure?"
        return
    contribs_scaler = MinMaxScaler([author[u'contribs']
                                    for title, authors in title_to_authors
                                    for author in authors])
    print "Scaling top authors"
    scaled_title_top_authors = {}
    for title, authors in title_to_authors:
        new_authors = []
        for author in authors:
            author[u'contribs'] = contribs_scaler.scale(author[u'contribs'])
            new_authors.append(author)
        scaled_title_top_authors[title] = new_authors
    return scaled_title_top_authors
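
# Illustrative sketch only: `MinMaxScaler` above is not sklearn's transformer
# but a small helper, assumed to be constructed from the list of observed
# values and to expose a `scale()` method mapping a value into [0, 1]. This
# hypothetical reimplementation shows the assumed behavior.
class _MinMaxScalerSketch(object):
    def __init__(self, values):
        self.minimum = min(values)
        self.maximum = max(values)

    def scale(self, value):
        spread = self.maximum - self.minimum
        if spread == 0:
            return 0.0  # degenerate case: every observed value was identical
        return float(value - self.minimum) / spread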
def ingest_data(wiki_id):
    """
    Creates Solr documents for a given wiki ID.

    :param wiki_id: the ID of the wiki (int or str)
    :type wiki_id: int
    :return:
    """
    # make sure the all-pages and all-user-pages collections exist
    solr.existing_collection(solr.all_pages_collection())
    solr.existing_collection(solr.all_user_pages_collection())

    resp = requests.get(u'http://www.wikia.com/api/v1/Wikis/Details', params={u'ids': wiki_id})
    items = resp.json()['items']
    if wiki_id not in items:
        print u"Wiki doesn't exist?"
        return

    api_data = items[wiki_id]
    wiki_data = {
        'id': api_data['id'],
        'wam_f': {'set': api_data['wam_score']},
        'title_s': {'set': api_data['title']},
        'attr_title': {'set': api_data['title']},
        'attr_desc': {'set': api_data['desc']}
    }
    for key in api_data['stats'].keys():
        wiki_data['%s_i' % key] = {'set': api_data['stats'][key]}

    wiki_api_data = requests.get(u'%swikia.php' % api_data[u'url'],
                                 params={u'method': u'getForWiki',
                                         u'service': u'CrossWikiCore',
                                         u'controller': u'WikiaSearchIndexerController'}).json()[u'contents']
    wiki_data[u'hub_s'] = wiki_api_data[u'hub_s']

    # easier to pass hub_s along with api_data downstream
    api_data[u'hub_s'] = wiki_api_data[u'hub_s']

    collection = solr.existing_collection(solr.collection_for_wiki(wiki_id))

    use_caching(is_read_only=True, shouldnt_compute=True)

    wpe = WikiPageToEntitiesService().get_value(wiki_id)
    if not wpe:
        print u"NO WIKI PAGE TO ENTITIES SERVICE FOR", wiki_id
        return False

    documents = []
    grouped_futures = []
    pages_to_authority = WikiAuthorityService().get_value(str(wiki_data['id']))
    for counter, (doc_id, entity_data) in enumerate(wpe.items()):
        documents.append({
            'id': doc_id,
            'attr_entities': {'set': list(set(entity_data.get(u'redirects', {}).values()
                                              + entity_data.get(u'titles', [])))},
            'type_s': {'set': 'Page'},
            'authority_f': {'set': pages_to_authority.get(doc_id, 0)},
            'hub_s': wiki_api_data['hub_s']
        })
        # flush a batch of documents every 1500 pages
        if counter != 0 and counter % 1500 == 0:
            grouped_futures.append(
                group(add_with_metadata.s(api_data, grouping) for grouping in iter_grouper(15, documents))()
            )
            documents = []

    grouped_futures.append(
        group(add_with_metadata.s(api_data, grouping) for grouping in iter_grouper(15, documents))()
    )

    # block on completion of all grouped futures
    completed = 0
    total = 0
    while len(filter(lambda x: not x.ready(), grouped_futures)) > 0:
        new_completed = 0
        new_total = 0
        for future in grouped_futures:
            new_completed += future.completed_count()
            new_total += len(future.results)
        if completed != new_completed or total != new_total:
            completed = new_completed
            total = new_total
            print "Grouped Tasks: (%d/%d)" % (completed, total)
        sleep(2)

    all_user_tuples = []
    for future in grouped_futures:
        result = get_with_backoff(future, [])
        map(all_user_tuples.extend, result)

    all_user_tuples = list(set(all_user_tuples))
    if not all_user_tuples:
        print "Empty user tuples, bailing"
        return

    # assign the unique user ids to the first variable, and the unique usernames to the second
    all_user_ids, all_users = zip(*all_user_tuples)

    collection.commit()
    solr.all_pages_collection().commit()
    solr.all_user_pages_collection().commit()

    wiki_data['attr_entities'] = {'set': []}
    # the count key comes back as a string, so coerce it to int
    for count, entities in WikiEntitiesService().get_value(str(wiki_id)).items():
        for entity in entities:
            map(wiki_data['attr_entities']['set'].append, [entity] * int(count))

    wiki_data['user_ids_is'] = {'set': all_user_ids}
    wiki_data['attr_users'] = {'set': all_users}
    wiki_data['total_authority_f'] = {'set': sum(pages_to_authority.values())}
    wiki_data['authorities_fs'] = {'set': pages_to_authority.values()}

    wiki_collection = solr.existing_collection(solr.global_collection())
    wiki_collection.add([wiki_data])
    wiki_collection.commit()
    print "Committed wiki data"

    print "Retrieving user docs..."
    futures = group(build_wiki_user_doc.s(api_data, user_tuple) for user_tuple in all_user_tuples)()
    future_result_len = len(futures.results)
    while not futures.ready():
        print "Progress: (%d/%d)" % (futures.completed_count(), future_result_len)
        sleep(2)

    user_docs = get_with_backoff(futures, [])
    if not user_docs:
        print "User doc list was empty. Possibly connection problems."
        return

    authority_scaler = MinMaxScaler([doc['total_page_authority_f']['set'] for doc in user_docs])
    contribs_scaler = MinMaxScaler([doc['total_contribs_f']['set'] for doc in user_docs])
    for doc in user_docs:
        scaled_authority = authority_scaler.scale(doc['total_page_authority_f']['set'])
        scaled_contribs = contribs_scaler.scale(doc['total_contribs_f']['set'])
        doc['scaled_authority_f'] = {'set': scaled_authority}
        doc['scaled_contribs_f'] = {'set': scaled_contribs}
        doc['scaled_contribs_authority_f'] = {'set': scaled_authority * scaled_contribs}

    wiki_user_collection = solr.existing_collection(solr.wiki_user_collection())
    wiki_user_collection.add(user_docs)
    wiki_user_collection.commit()

    print "Analyzing topics"
    futures = group(get_wiki_topic_doc.s(wiki_data['id'], topic)
                    for topic in list(set(wiki_data['attr_entities']['set'])))()
    future_result_len = len(futures.results)
    counter = 0
    while not futures.ready():
        if counter % 5 == 0:
            print "Progress: (%d/%d)" % (futures.completed_count(), future_result_len)
        sleep(2)
        counter += 1

    topic_docs = get_with_backoff(futures, [])
    if not topic_docs:
        print "No topics, probably a connection error"
        return

    collection.add(topic_docs)
    collection.commit()

    topic_collection = solr.existing_collection(solr.all_topics_collection())
    topic_collection.add(topic_docs)
    topic_collection.commit()
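
# Illustrative sketch only: `iter_grouper(n, iterable)`, used above to batch
# Solr documents, is defined elsewhere. It is assumed to yield lists of at
# most n items, as in this hypothetical equivalent:
from itertools import islice

def _iter_grouper_sketch(n, iterable):
    iterator = iter(iterable)
    while True:
        chunk = list(islice(iterator, n))
        if not chunk:
            break
        yield chunk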
def etl(wiki_id):
    """
    Runs the "api to database" ETL.

    Note that we'll be moving to Solr instead of a database,
    and that we should move all these print statements to a logger.

    :param wiki_id: the ID of the wiki
    :return:
    """
    start = time.time()

    # get wiki info
    resp = requests.get(u'http://www.wikia.com/api/v1/Wikis/Details', params={u'ids': wiki_id})
    items = resp.json()['items']
    if wiki_id not in items:
        print u"Wiki doesn't exist?"
        return

    wiki_data = items[wiki_id]
    resp.close()
    print wiki_data[u'title'].encode(u'utf8')
    api_url = u'%sapi.php' % wiki_data[u'url']

    # can't be parallelized since it's an enum
    all_titles = get_all_titles(api_url)
    print u"Got %d titles" % len(all_titles)

    results = group(get_all_revisions.s(api_url, title) for title in all_titles)()
    result_len = len(results.results)
    while not results.ready():
        print "%d / %d" % (results.completed_count(), result_len)
        time.sleep(2)

    all_revisions = get_with_backoff(results, [])
    if not all_revisions:
        print "No revisions, probably connection error"
        return

    print u"%d Revisions" % sum([len(revs) for title, revs in all_revisions])
    all_revisions = dict(all_revisions)

    title_top_authors = get_title_top_authors(wiki_id, api_url, all_titles, all_revisions)
    print time.time() - start
    if title_top_authors is None:
        print "No title top authors!"
        return

    print "Calculating Centrality"
    centralities = author_centrality(title_top_authors)

    # this is com_qscore_pr, the best metric per Qin and Cunningham
    comqscore_authority = dict([(doc_id,
                                 sum([author[u'contribs'] * centralities[author[u'user']]
                                      for author in authors]))
                                for doc_id, authors in title_top_authors.items()])

    print u"Got comqscore, storing data"
    bucket = connect_s3().get_bucket(u'nlp-data')
    key = bucket.new_key(key_name=u'service_responses/%s/WikiAuthorCentralityService.get' % wiki_id)
    key.set_contents_from_string(json.dumps(centralities, ensure_ascii=False))
    key = bucket.new_key(key_name=u'service_responses/%s/WikiAuthorityService.get' % wiki_id)
    key.set_contents_from_string(json.dumps(comqscore_authority, ensure_ascii=False))

    map(set_page_key.delay, title_top_authors.items())
    print wiki_id, u"finished in", time.time() - start, u"seconds"
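
# Illustrative sketch only: `author_centrality`, used in etl(), is defined
# elsewhere. Given the com_qscore_pr naming (per Qin and Cunningham), it is
# assumed to score each author by a PageRank-style centrality over the
# bipartite title-author graph. The networkx-based version below is a
# hypothetical reconstruction, not the canonical implementation.
import networkx as nx

def _author_centrality_sketch(title_top_authors):
    graph = nx.Graph()
    for title, authors in title_top_authors.items():
        for author in authors:
            graph.add_edge(u'title_%s' % title, u'author_%s' % author[u'user'])
    scores = nx.pagerank(graph)
    # keep only author nodes, stripping the namespacing prefix
    return dict((node[len(u'author_'):], score)
                for node, score in scores.items()
                if node.startswith(u'author_'))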