def get_coden_formatted(bfo, separator=','):
    """Produce a SPIRES-style CODEN reference for a record.

    The result typically looks like JOURNAL-CODEN,VOLUME,FIRSTPAGE.
    If it cannot be built (e.g. the record lacks publication info, or
    the CODENS knowledge base is missing or has no entry for the
    journal), an empty string is returned.

    @param bfo: BibFormat object giving access to the record's fields
    @param separator: string used to join the pieces of the reference
    @return: the formatted CODEN reference, or '' when unavailable
    """
    publication_info = bfo.fields('773__')
    if not publication_info:
        return ''
    publication_info = publication_info[0]

    journal = publication_info.get('p', '')
    volume = publication_info.get('v', '')
    pages = publication_info.get('c', '')
    # Keep only the first page of a page range such as '123-145'.
    if pages:
        dashpos = pages.find('-')
        if dashpos > -1:
            pages = pages[:dashpos]

    # A CODEN reference needs at least a journal plus a volume or page.
    if not (journal and (volume != '' or pages != '')):
        return ''
    try:
        # Look the journal up in the CODENS KB; IndexError means no
        # entry, and the KB layer may raise if the KB itself is absent.
        # Either way we degrade to '' rather than break formatting.
        # NOTE(review): narrowed from a bare ``except:`` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        coden_key = get_kbr_keys("CODENS", searchvalue=journal,
                                 searchtype='e')[0][0]
    except Exception:
        return ''
    return separator.join([coden_key, volume, pages])
def authorships(self, req, form):
    """Suggest author entries for auto-completion in the authors field.

    The current user's favourite authorships are preferred; when the
    typed term contains a colon, the text after it is treated as a
    partial institution name and completed from the 'institutes'
    knowledge base (at most 100 suggestions).

    @return: a JSON-encoded list of suggestion strings (possibly empty)
    """
    argd = wash_urlargd(form, {"publicationid": (str, ""),
                               "term": (str, "")})
    uid = collect_user_info(req)["uid"]
    req.content_type = "application/json"
    term = argd["term"]

    favourites = get_favourite_authorships_for_user(
        uid, argd["publicationid"], term)
    if favourites:
        return json.dumps(favourites)

    if ":" not in term:
        return json.dumps([])

    # "Name: Inst..." means an institution is being typed after the name.
    author, prefix = term.split(":", 1)
    prefix = prefix.strip()
    if len(prefix) <= 1:
        return json.dumps([])

    matches = sorted(row[0] for row in
                     get_kbr_keys("institutes", searchkey=prefix,
                                  searchtype="s"))
    return json.dumps(["%s: %s" % (author, inst)
                       for inst in matches[:100]])
def authorships(self, req, form):
    """Return a JSON list of author suggestions for auto-completion.

    Favourite authorships of the logged-in user win; failing that, a
    'Name: partial-institution' term is completed against the
    'institutes' knowledge base, capped at 100 entries.
    """
    argd = wash_urlargd(
        form, {'publicationid': (str, ''), 'term': (str, '')})
    user = collect_user_info(req)
    req.content_type = 'application/json'
    term = argd['term']
    pub_id = argd['publicationid']

    suggestions = get_favourite_authorships_for_user(
        user['uid'], pub_id, term)
    if suggestions:
        return json.dumps(suggestions)

    result = []
    if ':' in term:
        # The colon separates the author name from a partial institution.
        name, inst_part = term.split(':', 1)
        inst_part = inst_part.strip()
        if len(inst_part) > 1:
            rows = get_kbr_keys('institutes', searchkey=inst_part,
                                searchtype='s')
            hits = sorted(r[0] for r in rows)
            result = ['%s: %s' % (name, hit) for hit in hits[:100]]
    return json.dumps(result)
def authorships(self, req, form):
    """Auto-complete the authors field; responds with a JSON list.

    Checks the user's favourite authorships first.  Otherwise, when
    the term looks like 'Name: Institution...', the institution part
    is matched as a substring against the 'institutes' knowledge base
    and up to 100 completions are returned.
    """
    argd = wash_urlargd(form, {
        'publicationid': (str, ''),
        'term': (str, '')
    })
    uid = collect_user_info(req)['uid']
    req.content_type = 'application/json'
    term = argd['term']

    ret = get_favourite_authorships_for_user(
        uid, argd['publicationid'], term)
    if ret:
        return json.dumps(ret)

    # Split on the first colon; an empty separator means no colon at all.
    head, sep, tail = term.partition(':')
    tail = tail.strip()
    if sep and len(tail) > 1:
        found = [row[0] for row in get_kbr_keys(
            'institutes', searchkey=tail, searchtype='s')]
        found.sort()
        return json.dumps(
            ["%s: %s" % (head, inst) for inst in found[:100]])
    return json.dumps([])
def get_coden_formatted(bfo, separator=','):
    """Produce a SPIRES-style CODEN reference for a record.

    This typically looks like JOURNAL-CODEN,VOLUME,FIRSTPAGE.  If the
    reference cannot be built (missing publication info, missing
    CODEN lookup KB, or no KB entry for the journal), an empty string
    is produced instead.

    @param bfo: BibFormat object exposing the record's fields
    @param separator: string used to join the pieces of the reference
    @return: the joined reference string, or '' when unavailable
    """
    pubs = bfo.fields('773__')
    if not pubs:
        return ''
    info = pubs[0]

    journal = info.get('p', '')
    volume = info.get('v', '')
    pages = info.get('c', '')
    if pages:
        # Only the first page of a range like '123-145' is kept.
        pages = pages.split('-', 1)[0]

    # Require a journal plus at least one of volume/page.
    if not journal or (volume == '' and pages == ''):
        return ''
    try:
        # IndexError: journal has no CODENS entry; the KB layer may
        # also raise when the KB is missing.  Degrade to ''.
        # NOTE(review): was a bare ``except:``; narrowed so that
        # KeyboardInterrupt/SystemExit propagate.
        coden = get_kbr_keys("CODENS", searchvalue=journal,
                             searchtype='e')[0][0]
    except Exception:
        return ''
    return separator.join([coden, volume, pages])
def test_EJOURNALS_keys(self):
    """bibknowledge - test left/right rules (key lookups)"""
    # A key search for "Acta" must match exactly two journals.
    acta_hits = get_kbr_keys("EJOURNALS", "Acta")
    self.assertEqual(2, len(acta_hits))
    # An exact-match value search for the empty string matches nothing.
    exact_empty = get_kbr_values("EJOURNALS", '', searchtype='e')
    self.assertEqual(0, len(exact_empty))
    # A substring search returns every value, both when the key is
    # omitted entirely and when it is passed explicitly as ''.
    everything = get_kbr_values("EJOURNALS", searchtype='s')
    self.assertEqual(327, len(everything))
    everything_again = get_kbr_values("EJOURNALS", searchkey='',
                                      searchtype='s')
    self.assertEqual(327, len(everything_again))
def test_EJOURNALS_keys(self):
    """bibknowledge - test a left/right rule"""
    # Searching EJOURNALS keys for "Acta" must yield exactly two hits.
    matches = get_kbr_keys("EJOURNALS", "Acta")
    self.assertEqual(2, len(matches))
def load_kbs(cfg, run_sql, in_task=False): for kb, query in cfg.iteritems(): task_sleep_now_if_required(can_stop_too=True) if not kb_exists(kb): add_kb(kb) if in_task: write_message("Updating %s KB..." % kb) try: if not in_task: print "kb:", kb print "kb beginning:", len(get_kb_mappings(kb)) if kb.startswith('json_'): encoder = ComplexEncoder() mapping, description = run_sql(query, with_desc=True) if kb in CFG_ADDITIONAL_ENTRIES: mapping += CFG_ADDITIONAL_ENTRIES[kb] if not in_task: print CFG_ADDITIONAL_ENTRIES[kb] column_counter = {} new_description = [] for column in description[1:]: column = column[0] counter = column_counter[column] = column_counter.get( column, 0) + 1 if counter > 1: new_description.append('%s%d' % (column, counter)) else: new_description.append(column) description = new_description else: mapping = run_sql(query) if kb in CFG_ADDITIONAL_ENTRIES: mapping += CFG_ADDITIONAL_ENTRIES[kb] if not in_task: print CFG_ADDITIONAL_ENTRIES[kb] if not in_task: print "mapping:", len(mapping) if kb == 'projects': mapping += [('000000', 'NO PROJECT')] original_keys = set([key[0] for key in get_kbr_keys(kb)]) if not in_task: print "original_keys before:", len(original_keys) updated = 0 added = 0 for i, row in enumerate(mapping): key, value = row[0], row[1:] if kb.startswith('json_'): value = encoder.encode(dict(zip(description, value))) else: value = value[0] if value: if key in original_keys: original_keys.remove(key) if in_task: task_update_progress("%s - %s%%" % (kb, i * 100 / len(mapping))) if kb_mapping_exists(kb, key): updated += 1 update_kb_mapping(kb, key, key, value) else: added += 1 add_kb_mapping(kb, key, value) if not in_task: print "updated:", updated, "added:", added print "kb after update:", len(get_kb_mappings(kb)) print "original_keys after:", len(original_keys) if in_task: task_update_progress("Cleaning %s" % kb) for key in original_keys: remove_kb_mapping(kb, key) if not in_task: print "kb after remove:", len(get_kb_mappings(kb)) except: 
register_exception(alert_admin=True, prefix="Error when updating KB %s" % kb) continue
def load_kbs(cfg, run_sql, in_task=False):
    """Synchronise knowledge bases with the rows produced by SQL queries.

    For every (kb_name, query) pair in ``cfg`` the query is executed
    via ``run_sql`` and its rows are merged into the KB: new keys are
    added, existing keys updated, and keys no longer produced by the
    query are removed.  KBs whose name starts with 'json_' store each
    row as a JSON object keyed by the query's column names.

    @param cfg: mapping of KB name -> SQL query yielding (key, value...)
    @param run_sql: callable used to execute the queries
    @param in_task: True when running inside a bibsched task; progress
        then goes through write_message/task_update_progress instead of
        being printed to stdout
    """
    for kb, query in cfg.iteritems():
        # Give the scheduler a chance to pause or stop us between KBs.
        task_sleep_now_if_required(can_stop_too=True)
        if not kb_exists(kb):
            add_kb(kb)
        if in_task:
            write_message("Updating %s KB..." % kb)
        try:
            if not in_task:
                print "kb:", kb
                print "kb beginning:", len(get_kb_mappings(kb))
            if kb.startswith('json_'):
                # JSON KBs: each row becomes a JSON object whose keys
                # are the query's column names.
                encoder = ComplexEncoder()
                mapping, description = run_sql(query, with_desc=True)
                if kb in CFG_ADDITIONAL_ENTRIES:
                    mapping += CFG_ADDITIONAL_ENTRIES[kb]
                    if not in_task:
                        print CFG_ADDITIONAL_ENTRIES[kb]
                # De-duplicate repeated column names by appending a
                # numeric suffix ('title', 'title2', ...); the first
                # column is the key and is skipped.
                column_counter = {}
                new_description = []
                for column in description[1:]:
                    column = column[0]
                    counter = column_counter[column] = column_counter.get(column, 0) + 1
                    if counter > 1:
                        new_description.append('%s%d' % (column, counter))
                    else:
                        new_description.append(column)
                description = new_description
            else:
                mapping = run_sql(query)
                if kb in CFG_ADDITIONAL_ENTRIES:
                    mapping += CFG_ADDITIONAL_ENTRIES[kb]
                    if not in_task:
                        print CFG_ADDITIONAL_ENTRIES[kb]
            if not in_task:
                print "mapping:", len(mapping)
            if kb == 'projects':
                # Sentinel so records can reference "no project".
                mapping += [('000000', 'NO PROJECT')]
            # Keys currently in the KB; whatever is left in this set
            # after the update loop is stale and gets removed below.
            original_keys = set([key[0] for key in get_kbr_keys(kb)])
            if not in_task:
                print "original_keys before:", len(original_keys)
            updated = 0
            added = 0
            for i, row in enumerate(mapping):
                key, value = row[0], row[1:]
                if kb.startswith('json_'):
                    value = encoder.encode(dict(zip(description, value)))
                else:
                    value = value[0]
                if value:
                    # Row still produced by the query -> not stale.
                    if key in original_keys:
                        original_keys.remove(key)
                    if in_task:
                        task_update_progress("%s - %s%%" % (kb, i * 100 / len(mapping)))
                    if kb_mapping_exists(kb, key):
                        updated += 1
                        update_kb_mapping(kb, key, key, value)
                    else:
                        added += 1
                        add_kb_mapping(kb, key, value)
            if not in_task:
                print "updated:", updated, "added:", added
                print "kb after update:", len(get_kb_mappings(kb))
                print "original_keys after:", len(original_keys)
            if in_task:
                task_update_progress("Cleaning %s" % kb)
            # Remove mappings no longer produced by the query.
            for key in original_keys:
                remove_kb_mapping(kb, key)
            if not in_task:
                print "kb after remove:", len(get_kb_mappings(kb))
        except:
            # Best-effort: log the failure and continue with the next
            # KB.  NOTE(review): bare ``except:`` also swallows
            # KeyboardInterrupt/SystemExit — consider narrowing.
            register_exception(alert_admin=True, prefix="Error when updating KB %s" % kb)
            continue