def insert_entities(args):
    try:
        use_caching(is_read_only=True, shouldnt_compute=True)
        db, cursor = get_db_and_cursor(args)

        wpe = WikiPageToEntitiesService().get_value(args.wid)
        if not wpe:
            print u"NO WIKI PAGE TO ENTITIES SERVICE FOR", args.wid
            return False

        print u"Priming entity data on", args.wid
        for page, entity_data in wpe.items():
            entity_list = map(
                my_escape,
                list(
                    set(
                        entity_data.get(u'redirects', {}).values() +
                        entity_data.get(u'titles', []))))
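            # Insert the escaped names in batches of 50 using a multi-row INSERT IGNORE.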
            for i in range(0, len(entity_list), 50):
                cursor.execute(u"""
                INSERT IGNORE INTO topics (name) VALUES ("%s")
                """ % u'"), ("'.join(entity_list[i:i + 50]))
                db.commit()
        return args
    except Exception as e:
        print e, traceback.format_exc()
        return False
Example #2
def get_data(wid):
    log(wid)
    use_caching(shouldnt_compute=True)
    # TODO: this should use CombinedEntitiesService
    doc_ids_to_heads = WikiToPageHeadsService().get_value(wid, {})
    doc_ids_to_entities = WikiPageToEntitiesService().get_value(wid, {})
    doc_ids_combined = {}
    if doc_ids_to_heads == {}:
        log(wid, "no heads")
    if doc_ids_to_entities == {}:
        log(wid, "no entities")
    from_s3 = json.loads(
        bucket.get_key('feature-data/page-%s.json' %
                       wid).get_contents_as_string())
    for doc_id in doc_ids_to_heads:
        entity_response = doc_ids_to_entities.get(doc_id, {
            'titles': [],
            'redirects': {}
        })
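        # Combine preprocessed titles, redirect names and targets, and heads with the S3 features.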
        doc_ids_combined[doc_id] = (map(
            preprocess,
            entity_response['titles'] + entity_response['redirects'].keys() +
            entity_response['redirects'].values() +
            list(set(doc_ids_to_heads.get(doc_id, [])))) +
                                    from_s3.get(doc_id, []))
    return doc_ids_combined.items()
Example #3
def insert_entities(args):
    try:
        use_caching(is_read_only=True, shouldnt_compute=True)
        db, cursor = get_db_and_cursor(args)

        wpe = WikiPageToEntitiesService().get_value(args.wid)
        if not wpe:
            print u"NO WIKI PAGE TO ENTITIES SERVICE FOR", args.wid
            return False

        print u"Priming entity data on", args.wid
        for page, entity_data in wpe.items():
            entity_list = map(
                my_escape, list(set(entity_data.get(u"redirects", {}).values() + entity_data.get(u"titles", [])))
            )
            for i in range(0, len(entity_list), 50):
                cursor.execute(
                    u"""
                INSERT IGNORE INTO topics (name) VALUES ("%s")
                """
                    % u'"), ("'.join(entity_list[i : i + 50])
                )
                db.commit()
        return args
    except Exception as e:
        print e, traceback.format_exc()
        return False
def get_data_wid(wid):
    print wid
    use_caching(shouldnt_compute=True)
    # TODO: this should use CombinedEntitiesService
    doc_ids_to_heads = WikiToPageHeadsService().get_value(wid, {})
    doc_ids_to_entities = WikiPageToEntitiesService().get_value(wid, {})
    doc_ids_combined = {}
    if doc_ids_to_heads == {}:
        print wid, "no heads"
    if doc_ids_to_entities == {}:
        print wid, "no entities"
    for doc_id in doc_ids_to_heads:
        entity_response = doc_ids_to_entities.get(doc_id, {"titles": [], "redirects": {}})
        doc_ids_combined[doc_id] = map(
            preprocess,
            entity_response["titles"]
            + entity_response["redirects"].keys()
            + entity_response["redirects"].values()
            + list(set(doc_ids_to_heads.get(doc_id, []))),
        )
    return doc_ids_combined.items()
def get_data_wid(wid):
    print wid
    use_caching(shouldnt_compute=True)
    # TODO: this should use CombinedEntitiesService
    doc_ids_to_heads = WikiToPageHeadsService().get_value(wid, {})
    doc_ids_to_entities = WikiPageToEntitiesService().get_value(wid, {})
    doc_ids_combined = {}
    if doc_ids_to_heads == {}:
        print wid, "no heads"
    if doc_ids_to_entities == {}:
        print wid, "no entities"
    for doc_id in doc_ids_to_heads:
        entity_response = doc_ids_to_entities.get(doc_id, {
            'titles': [],
            'redirects': {}
        })
        doc_ids_combined[doc_id] = map(
            preprocess,
            entity_response['titles'] + entity_response['redirects'].keys() +
            entity_response['redirects'].values() +
            list(set(doc_ids_to_heads.get(doc_id, []))))
    return doc_ids_combined.items()
Example #6
def get_data(wid):
    log(wid)
    use_caching(shouldnt_compute=True)
    # TODO: this should use CombinedEntitiesService
    doc_ids_to_heads = WikiToPageHeadsService().get_value(wid, {})
    doc_ids_to_entities = WikiPageToEntitiesService().get_value(wid, {})
    doc_ids_combined = {}
    if doc_ids_to_heads == {}:
        log(wid, "no heads")
    if doc_ids_to_entities == {}:
        log(wid, "no entities")
    from_s3 = json.loads(bucket.get_key(
        'feature-data/page-%s.json' % wid).get_contents_as_string())
    for doc_id in doc_ids_to_heads:
        entity_response = doc_ids_to_entities.get(
            doc_id, {'titles': [], 'redirects': {}})
        doc_ids_combined[doc_id] = (map(preprocess,
                                        entity_response['titles'] +
                                        entity_response['redirects'].keys() +
                                        entity_response['redirects'].values() +
                                        list(set(doc_ids_to_heads.get(doc_id,
                                                                      [])))) +
                                    from_s3.get(doc_id, []))
    return doc_ids_combined.items()
Example #7
def ingest_data(wiki_id):
    """
    Create Solr documents for a given wiki ID

    :param wiki_id: the ID of the wiki (int or str)
    :type wiki_id: int
    :return:
    """
    # make sure the all-pages and all-user-pages collections exist
    solr.existing_collection(solr.all_pages_collection())
    solr.existing_collection(solr.all_user_pages_collection())

    resp = requests.get(u'http://www.wikia.com/api/v1/Wikis/Details', params={u'ids': wiki_id})
    items = resp.json()['items']
    if wiki_id not in items:
        print u"Wiki doesn't exist?"
        return

    api_data = items[wiki_id]
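    # Build the wiki-level Solr document; the {'set': ...} values are Solr atomic updates.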
    wiki_data = {
        'id': api_data['id'],
        'wam_f': {'set': api_data['wam_score']},
        'title_s': {'set': api_data['title']},
        'attr_title': {'set': api_data['title']},
        'attr_desc': {'set': api_data['desc']}
    }
    for key in api_data['stats'].keys():
        wiki_data['%s_i' % key] = {'set': api_data['stats'][key]}

    wiki_api_data = requests.get(u'%swikia.php' % (api_data[u'url']),
                                 params={u'method': u'getForWiki',
                                         u'service': u'CrossWikiCore',
                                         u'controller': u'WikiaSearchIndexerController'}).json()[u'contents']

    wiki_data[u'hub_s'] = wiki_api_data[u'hub_s']
    
    # copy hub_s onto api_data as well, since api_data is what gets passed to the downstream tasks
    api_data[u'hub_s'] = wiki_api_data[u'hub_s']

    collection = solr.existing_collection(solr.collection_for_wiki(wiki_id))

    use_caching(is_read_only=True, shouldnt_compute=True)
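    # Read precomputed service values from cache; the flags appear to disable recomputation and cache writes.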

    wpe = WikiPageToEntitiesService().get_value(wiki_id)
    if not wpe:
        print u"NO WIKI PAGE TO ENTITIES SERVICE FOR", wiki_id
        return False

    documents = []

    grouped_futures = []

    pages_to_authority = WikiAuthorityService().get_value(str(wiki_data['id']))
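    # Build a Solr document per page, flushing them to grouped tasks every 1500 pages.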
    for counter, (doc_id, entity_data) in enumerate(wpe.items()):
        documents.append({
            'id': doc_id,
            'attr_entities': {'set': list(set(entity_data.get(u'redirects', {}).values() +
                                              entity_data.get(u'titles', [])))},
            'type_s': {'set': 'Page'},
            'authority_f': {'set': pages_to_authority.get(doc_id, 0)},
            'hub_s': wiki_api_data['hub_s']
        })

        if counter != 0 and counter % 1500 == 0:
            grouped_futures.append(
                group(add_with_metadata.s(api_data, grouping) for grouping in iter_grouper(15, documents))()
            )

            documents = []

    grouped_futures.append(
        group(add_with_metadata.s(api_data, grouping) for grouping in iter_grouper(15, documents))()
    )

    # block on completion of all grouped futures
    completed = 0
    total = 0
    while len(filter(lambda x: not x.ready(), grouped_futures)) > 0:
        new_completed = 0
        new_total = 0
        for future in grouped_futures:
            new_completed += future.completed_count()
            new_total += len(future.results)
        if completed != new_completed or total != new_total:
            completed = new_completed
            total = new_total
            print "Grouped Tasks: (%d/%d)" % (completed, total)
        sleep(2)

    all_user_tuples = []
    for future in grouped_futures:
        result = get_with_backoff(future, [])
        for user_tuples in result:
            all_user_tuples.extend(user_tuples)

    all_user_tuples = list(set(all_user_tuples))
    if not all_user_tuples:
        print "Empty user tuples, bailing"
        return

    # assign the unique user ids to the first variable, and the unique usernames to the second
    all_user_ids, all_users = zip(*all_user_tuples)

    collection.commit()
    solr.all_pages_collection().commit()
    solr.all_user_pages_collection().commit()

    wiki_data['attr_entities'] = {'set': []}
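    # WikiEntitiesService maps an occurrence count to the entities seen that many times on this wiki.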

    for count, entities in WikiEntitiesService().get_value(str(wiki_id)).items():
        for entity in entities:
            wiki_data['attr_entities']['set'].extend([entity] * int(count))  # count is a string key, so coerce to int

    wiki_data['user_ids_is'] = {'set': all_user_ids}
    wiki_data['attr_users'] = {'set': all_users}
    wiki_data['total_authority_f'] = {'set': sum(pages_to_authority.values())}
    wiki_data['authorities_fs'] = {'set': pages_to_authority.values()}

    wiki_collection = solr.existing_collection(solr.global_collection())
    wiki_collection.add([wiki_data])
    wiki_collection.commit()
    print "Committed wiki data"

    print "Retrieving user docs..."
    futures = group(build_wiki_user_doc.s(api_data, user_tuple) for user_tuple in all_user_tuples)()
    future_result_len = len(futures.results)
    while not futures.ready():
        print "Progress: (%d/%d)" % (futures.completed_count(), future_result_len)
        sleep(2)

    user_docs = get_with_backoff(futures, [])
    if not user_docs:
        print "User docs was empty. Possibly connection problems."
        return

    authority_scaler = MinMaxScaler([doc['total_page_authority_f']['set'] for doc in user_docs])
    contribs_scaler = MinMaxScaler([doc['total_contribs_f']['set'] for doc in user_docs])
    for doc in user_docs:
        scaled_authority = authority_scaler.scale(doc['total_page_authority_f']['set'])
        scaled_contribs = contribs_scaler.scale(doc['total_contribs_f']['set'])
        doc['scaled_authority_f'] = {'set': scaled_authority}
        doc['scaled_contribs_f'] = {'set': scaled_contribs}
        doc['scaled_contribs_authority_f'] = {'set': scaled_authority * scaled_contribs}

    wiki_user_collection = solr.existing_collection(solr.wiki_user_collection())
    wiki_user_collection.add(user_docs)
    wiki_user_collection.commit()

    print "Analyzing topics"
    futures = group(get_wiki_topic_doc.s(wiki_data['id'], topic)
                    for topic in list(set(wiki_data['attr_entities']['set'])))()
    future_result_len = len(futures.results)
    counter = 0
    while not futures.ready():
        if counter % 5 == 0:
            print "Progress: (%d/%d)" % (futures.completed_count(), future_result_len)
        sleep(2)
        counter += 1
    topic_docs = get_with_backoff(futures, [])
    if not topic_docs:
        print "No topics, probably a connection error"
        return

    collection.add(topic_docs)
    collection.commit()

    topic_collection = solr.existing_collection(solr.all_topics_collection())
    topic_collection.add(topic_docs)
    topic_collection.commit()
def insert_contrib_data(args):
    try:
        use_caching(is_read_only=True, shouldnt_compute=True)
        db, cursor = get_db_and_cursor(args)
        wpe = WikiPageToEntitiesService().get_value(args.wid)
        if not wpe:
            print u"NO WIKI PAGE TO ENTITIES SERVICE FOR", args.wid
            return False
        authority_dict_fixed = get_authority_dict_fixed(args)
        if not authority_dict_fixed:
            return False
        print u"Inserting page and author and contrib data for wiki", args.wid
        for doc_id in authority_dict_fixed:
            wiki_id, article_id = doc_id.split(u'_')

            entity_data = wpe.get(doc_id, {})
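            # Escape and dedupe the page's redirect targets and titles, dropping empty strings.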
            entity_list = filter(
                lambda x: x,
                map(
                    lambda x: x.strip(),
                    map(
                        my_escape,
                        list(
                            set(
                                entity_data.get(u'redirects', {}).values() +
                                entity_data.get(u'titles', []))))))

            cursor.execute(u"""
            SELECT topic_id FROM topics WHERE name IN ("%s")
            """ % (u'", "'.join(entity_list)))
            topic_ids = list(set([result[0] for result in cursor.fetchall()]))

            for topic_id in topic_ids:
                sql = u"""
                INSERT IGNORE INTO articles_topics (article_id, wiki_id, topic_id) VALUES (%s, %s, %s)
                """ % (article_id, wiki_id, topic_id)
                cursor.execute(sql)
                db.commit()

            cursor = db.cursor()

            for contribs in PageAuthorityService().get_value(doc_id, []):
                cursor.execute(u"""
                INSERT IGNORE INTO users (user_id, user_name) VALUES (%d, "%s")
                """ % (contribs[u'userid'], my_escape(contribs[u'user'])))
                db.commit()

                cursor.execute(u"""
                INSERT INTO articles_users (article_id, wiki_id, user_id, contribs) VALUES (%s, %s, %d, %s)
                """ % (article_id, wiki_id, contribs[u'userid'],
                       contribs[u'contribs']))
                db.commit()

                local_authority = contribs[
                    u'contribs'] * authority_dict_fixed.get(doc_id, 0)
                for topic_id in topic_ids:
                    cursor.execute(u"""
                    INSERT INTO topics_users (user_id, topic_id, local_authority) VALUES (%d, %s, %s)
                    ON DUPLICATE KEY UPDATE local_authority = local_authority + %s
                    """ % (contribs[u'userid'], topic_id, local_authority,
                           local_authority))
                    db.commit()
        db.commit()
        print u"Done with", args.wid
        return args
    except Exception as e:
        print e, traceback.format_exc()
        return False
Example #9
def entities(wid):
    #pprint(WikiPageToEntitiesService().get_value(wid, {}))
    return WikiPageToEntitiesService().get_value(wid, {})
Example #10
def insert_contrib_data(args):
    try:
        use_caching(is_read_only=True, shouldnt_compute=True)
        db, cursor = get_db_and_cursor(args)
        wpe = WikiPageToEntitiesService().get_value(args.wid)
        if not wpe:
            print u"NO WIKI PAGE TO ENTITIES SERVICE FOR", args.wid
            return False
        authority_dict_fixed = get_authority_dict_fixed(args)
        if not authority_dict_fixed:
            return False
        print u"Inserting page and author and contrib data for wiki", args.wid
        for doc_id in authority_dict_fixed:
            wiki_id, article_id = doc_id.split(u"_")

            entity_data = wpe.get(doc_id, {})
            entity_list = filter(
                lambda x: x,
                map(
                    lambda x: x.strip(),
                    map(
                        my_escape,
                        list(set(entity_data.get(u"redirects", {}).values() + entity_data.get(u"titles", []))),
                    ),
                ),
            )

            cursor.execute(
                u"""
            SELECT topic_id FROM topics WHERE name IN ("%s")
            """
                % (u'", "'.join(entity_list))
            )
            topic_ids = list(set([result[0] for result in cursor.fetchall()]))

            for topic_id in topic_ids:
                sql = u"""
                INSERT IGNORE INTO articles_topics (article_id, wiki_id, topic_id) VALUES (%s, %s, %s)
                """ % (
                    article_id,
                    wiki_id,
                    topic_id,
                )
                cursor.execute(sql)
                db.commit()

            cursor = db.cursor()

            for contribs in PageAuthorityService().get_value(doc_id, []):
                cursor.execute(
                    u"""
                INSERT IGNORE INTO users (user_id, user_name) VALUES (%d, "%s")
                """
                    % (contribs[u"userid"], my_escape(contribs[u"user"]))
                )
                db.commit()

                cursor.execute(
                    u"""
                INSERT INTO articles_users (article_id, wiki_id, user_id, contribs) VALUES (%s, %s, %d, %s)
                """
                    % (article_id, wiki_id, contribs[u"userid"], contribs[u"contribs"])
                )
                db.commit()

                local_authority = contribs[u"contribs"] * authority_dict_fixed.get(doc_id, 0)
                for topic_id in topic_ids:
                    cursor.execute(
                        u"""
                    INSERT INTO topics_users (user_id, topic_id, local_authority) VALUES (%d, %s, %s)
                    ON DUPLICATE KEY UPDATE local_authority = local_authority + %s
                    """
                        % (contribs[u"userid"], topic_id, local_authority, local_authority)
                    )
                    db.commit()
        db.commit()
        print u"Done with", args.wid
        return args
    except Exception as e:
        print e, traceback.format_exc()
        return False