def main() -> None:
    utils.setup_logger(__file__)
    logging.info('generate_suggestions.py -- starting')

    suggest = SolrCollection(os.getenv('SOLR_COLLECTION_SUGGESTER'))
    search = SolrCollection(os.getenv('SOLR_COLLECTION_SEARCH'))

    logging.info('clearing suggestions')
    suggest.delete_documents('*:*', commit=False)

    relation_counts = search.get_facet_counts('relation')
    community_uri_to_name = search.select_all_documents(
        fq='sys_type:community',
        fl=['sys_uri', 'sys_name'],
        id_field='sys_id'
    )
    community_uri_to_name = {community['sys_uri']: community['sys_name']
                             for community in community_uri_to_name}

    suggestion_types = utils.load_resource('suggestions')

    # Title suggestions: one batch per configured doc type
    doc_suggestions = {doc_type: get_doc_suggestions(
        search, doc_type, config['mapping'], relation_counts,
        community_uri_to_name)
        for doc_type, config in suggestion_types.items()}

    logging.info('adding title suggestions:')
    for doc_type, doc_type_suggestions in doc_suggestions.items():
        suggest.index_documents(doc_type_suggestions, commit=False)
        logging.info(' titles: %s of type %s',
                     len(doc_type_suggestions), doc_type)

    # User defined synonym suggestions, only for doc types that configure them
    user_defined_synonym_suggestions = {doc_type: get_doc_suggestions(
        search, doc_type, config['user_defined_synonyms'], relation_counts,
        community_uri_to_name, 'user_defined_synonyms:[* TO *]')
        for doc_type, config in suggestion_types.items()
        if 'user_defined_synonyms' in config}

    logging.info('adding user defined synonym suggestions:')
    for doc_type, doc_type_suggestions in \
            user_defined_synonym_suggestions.items():
        suggest.index_documents(doc_type_suggestions, commit=False)
        logging.info(' user defined synonyms: %s of type %s',
                     len(doc_type_suggestions), doc_type)

    # Context suggestions per doc type, keyed by each of its configured
    # relations
    context_suggestions = {
        doc_type: {
            relation: get_suggestions(
                search, doc_type, relation,
                suggestion_types[relation]['mapping'],
                community_uri_to_name)
            for relation in config['relations']
        }
        for doc_type, config in suggestion_types.items()}

    logging.info('adding context suggestions:')
    for doc_type, relations in context_suggestions.items():
        for relation, suggestions in relations.items():
            suggest.index_documents(suggestions, commit=False)
            logging.info(' titles: %s of type %s in context of %s',
                         len(suggestions), relation, doc_type)

    logging.info('adding theme suggestions:')
    theme_suggestions = get_theme_suggestions(search, 'dataset')
    suggest.index_documents(theme_suggestions, commit=False)
    logging.info(' themes: %s in context of %s',
                 len(theme_suggestions), 'dataset')

    logging.info('committing changes to index')
    suggest.index_documents([], commit=True)

    logging.info('building Solr suggester')
    suggest.build_suggestions('build_suggest')

    logging.info('generate_suggestions.py -- finished')
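# Illustrative sketch only: the 'suggestions' resource loaded via
# utils.load_resource() is not shown in this module. Based on how main() and
# get_suggestions() read it, it is assumed to map each doc type to a
# DictMapper field mapping, an optional user_defined_synonyms mapping and the
# relations for which context suggestions are generated. The doc types and
# field names below are hypothetical examples, not the real schema.
EXAMPLE_SUGGESTION_TYPES = {
    'dataset': {
        'mapping': {'title': ['suggestion'], 'sys_uri': ['payload']},
        'user_defined_synonyms': {'user_defined_synonyms': ['suggestion']},
        'relations': ['organization'],
    },
    'organization': {
        'mapping': {'sys_name': ['suggestion'], 'sys_uri': ['payload']},
        'relations': [],
    },
}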
def main() -> None:
    utils.setup_logger(__file__)
    logging.info('aggregate_signals.py started')

    signal_collection = SolrCollection(os.getenv('SOLR_COLLECTION_SIGNALS'))
    signal_aggregated_collection = SolrCollection(
        os.getenv('SOLR_COLLECTION_SIGNALS_AGGREGATED'))

    signals = signal_collection.select_all_documents()
    aggregated_signals = signal_aggregated_collection.select_all_documents()

    signal_aggregated_collection.index_documents(
        get_aggregations(aggregate_fields(signals, ['query', 'handler']),
                         aggregated_signals, 'query'))
    signal_aggregated_collection.index_documents(
        get_aggregations(
            aggregate_fields(
                reduce_date_field_to_hours(signals, 'search_timestamp'),
                ['search_timestamp', 'handler']),
            aggregated_signals, 'search_timestamp'))
    signal_aggregated_collection.index_documents(
        get_aggregations(
            aggregate_fields(preprocess_filters(signals),
                             ['filters', 'handler']),
            aggregated_signals, 'filters'))

    signal_collection.delete_documents('*:*')

    logging.info('aggregate_signals.py finished')
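# Illustrative sketch only: the raw signal documents and the helper functions
# (aggregate_fields, get_aggregations, reduce_date_field_to_hours,
# preprocess_filters) are not defined in this module. From the field names
# used above, a raw signal appears to carry at least a query, a handler, a
# search timestamp and the applied filters; the values below are hypothetical.
EXAMPLE_SIGNAL = {
    'query': 'air quality',
    'handler': '/search',
    'search_timestamp': '2023-05-01T13:37:00Z',
    'filters': ['sys_type:dataset'],
}
# The three index_documents() calls in main() then appear to aggregate such
# documents per query, per hour of the search timestamp and per filter set,
# merging the results into the already aggregated signals before the raw
# signals collection is cleared.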
def get_suggestions(search_core: SolrCollection, in_context: str,
                    doc_type: str, mappings: dict,
                    communities: dict) -> list:
    """
    Get suggestions of a given doc_type in a given context from the
    search core

    :param search_core: The search core to get suggestions from
    :param in_context: The context
    :param doc_type: The doc type
    :param mappings: The field mappings of the given doc type
    :param dict communities: A dictionary with community URIs as keys and
                             community names as values
    :return: The list of suggestions
    """
    # For context suggestions, replace any existing payload mapping with
    # sys_uri => payload
    delete_mappings = [key for key, value in mappings.items()
                       if 'payload' in value]
    for mapping in delete_mappings:
        del mappings[mapping]
    mappings['sys_uri'] = ['payload']
    dict_mapper = DictMapper(mappings)

    doc_entities = search_core.select_all_documents(
        'sys_type:"{0}" AND sys_uri:[* TO *]'.format(doc_type),
        id_field='sys_id'
    )
    context_entities = search_core.select_all_documents(
        'sys_type:"{0}" AND relation:[* TO *]'.format(in_context),
        ['relation'],
        id_field='sys_id'
    )

    # Count how often each doc entity is referenced by an entity of the
    # context type; this count becomes the suggestion weight.
    counts = {}
    for doc_entity in doc_entities:
        for context_entity in context_entities:
            if doc_entity['sys_uri'] in context_entity['relation']:
                counts[doc_entity['sys_uri']] = \
                    counts.get(doc_entity['sys_uri'], 0) + 1

    suggestions = []
    for doc_entity in doc_entities:
        entity = dict_mapper.apply_map(doc_entity)

        # Replace community URIs with their human-readable names
        if 'relation_community' in entity:
            names = []
            for community_uri in entity['relation_community']:
                if community_uri in communities:
                    names.append(communities[community_uri])
            entity['relation_community'] = names

        # Skip entities that are never referenced in the given context
        if doc_entity['sys_uri'] not in counts:
            continue

        entity.update({
            'weight': counts[doc_entity['sys_uri']],
            'in_context_of': in_context,
            'language': ['nl', 'en'],
            'type': [suggestion_type + '_filter'
                     for suggestion_type in entity['type']]
            if 'type' in entity else ['filter']
        })
        suggestions.append(entity)

    return suggestions
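# Illustrative sketch only: a hypothetical call to get_suggestions(). The
# mapping fields, community URI and expected output keys are examples and
# assume that DictMapper maps source fields to the listed target fields;
# they are not taken from the real schema.
#
#   search = SolrCollection(os.getenv('SOLR_COLLECTION_SEARCH'))
#   suggestions = get_suggestions(
#       search,
#       in_context='dataset',
#       doc_type='organization',
#       mappings={'sys_name': ['suggestion'], 'sys_uri': ['payload']},
#       communities={'https://example.org/community/1': 'Example community'})
#
# Given the code above, each returned suggestion carries the mapped fields
# plus a weight (the number of referencing context entities), the context,
# a language list and a '*_filter' type, so an entry might look roughly like:
#
#   {'suggestion': 'Example org', 'payload': 'https://example.org/org/1',
#    'weight': 3, 'in_context_of': 'dataset', 'language': ['nl', 'en'],
#    'type': ['filter']}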
def update_reverse_relations(searcher: SolrCollection) -> None:
    """
    Ensures that the relations in the following example are mirrored:

    [{
        "sys_type": "A",
        "identifier": "Foo",
        "relation_B": "Bar"
    }, {
        "sys_type": "B",
        "identifier": "Bar"
    }]

    Afterwards the relations are set as:

    [{
        "sys_type": "A",
        "identifier": "Foo",
        "relation_B": "Bar"
    }, {
        "sys_type": "B",
        "identifier": "Bar",
        "relation_A": "Foo"
    }]

    This ensures that all relations are traversable regardless of which
    object is used as a reference point.

    :param SolrCollection searcher: The searcher to find and update
                                    objects with
    """
    relations = utils.load_resource('relations')
    for source_object, source_data in relations.items():
        for relation, mapping in source_data.items():
            logging.info('updating reverse relations from %s to %s',
                         source_object, relation)

            field_entities = searcher.select_all_documents(
                'sys_type:{0}'.format(source_object),
                ['sys_id', mapping['match'], mapping['to']],
                id_field='sys_id')
            field_entities = {
                entity[mapping['match']]: entity
                for entity in field_entities
            }

            relation_entities = searcher.select_all_documents(
                'sys_type:{0}'.format(relation),
                [mapping['match'], mapping['from']],
                id_field='sys_id')

            # Map each source entity (by its match value) to the match values
            # of the relation entities that reference it
            entities_to_relation_entities = {}
            for relation_entity in relation_entities:
                if mapping['from'] not in relation_entity:
                    continue
                for uri in relation_entity[mapping['from']]:
                    if uri in field_entities:
                        if uri not in entities_to_relation_entities:
                            entities_to_relation_entities[uri] = []
                        entities_to_relation_entities[uri].append(
                            relation_entity[mapping['match']])

            logging.info(
                ' found %s objects of type %s with relations to'
                ' objects of type %s',
                len(entities_to_relation_entities), relation, source_object)

            # Remove reverse relations that no longer have a counterpart
            deletes = [{
                'sys_id': field_entity['sys_id'],
                mapping['to']: {
                    'remove': field_entity[mapping['to']]
                }
            } for field_entity in field_entities.values()
                if mapping['to'] in field_entity
                and field_entity[mapping['match']]
                not in entities_to_relation_entities]

            # Set the reverse relations on the referenced entities
            updates = [{
                'sys_id': field_entities[uri]['sys_id'],
                mapping['to']: {
                    'set': entities_to_relation_entities[uri]
                }
            } for uri in entities_to_relation_entities]

            searcher.index_documents(deletes, commit=False)
            searcher.index_documents(updates, commit=False)

            logging.info('results')
            logging.info(' deleted: %s', len(deletes))
            logging.info(' updated: %s', len(updates))
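# Illustrative sketch only: the 'relations' resource loaded above is not shown
# in this module. From the way update_reverse_relations() reads it, it is
# assumed to map a source sys_type to the sys_types that reference it, each
# with the field that identifies a source object ('match'), the field on the
# referencing object that points to it ('from') and the reverse-relation
# field written back onto the source object ('to'). The type and field names
# below are hypothetical.
EXAMPLE_RELATIONS = {
    'organization': {
        'dataset': {
            'match': 'sys_uri',
            'from': 'relation_organization',
            'to': 'relation_dataset',
        },
    },
}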