def prepare_internal_notes(self, obj):
    """
    Prepare the internal_notes (or x- and n-tagged varfields)
    """
    varfields = obj.record_metadata.varfield_set.all()
    notes = []
    # Internal notes live in both x- and n-tagged varfields.
    for tag in ('x', 'n'):
        notes.extend(helpers.get_varfield_vals(varfields, tag, many=True))
    return notes
def prepare_internal_notes(self, obj):
    '''
    Prepare the internal_notes (or x- and n-tagged varfields)
    '''
    vf_set = obj.record_metadata.varfield_set.all()
    x_notes = helpers.get_varfield_vals(vf_set, 'x', many=True)
    n_notes = helpers.get_varfield_vals(vf_set, 'n', many=True)
    # x-tagged notes first, then n-tagged, matching index order.
    return x_notes + n_notes
def prepare_barcode(self, obj):
    '''
    Prepare the barcode; grab it from the varfields on this item
    record.
    '''
    item_varfields = obj.record_metadata.varfield_set.all()
    barcode = helpers.get_varfield_vals(item_varfields, 'b')
    return barcode
def prepare_volume(self, obj):
    '''
    Prepare the volume number; grab it from the varfields on this
    item record.
    '''
    item_varfields = obj.record_metadata.varfield_set.all()
    volume = helpers.get_varfield_vals(item_varfields, 'v')
    return volume
def prepare_barcode(self, obj):
    """
    Prepare the barcode; grab it from the varfields on this item
    record.
    """
    # Barcodes are stored in b-tagged varfields on the item.
    return helpers.get_varfield_vals(
        obj.record_metadata.varfield_set.all(), 'b')
def prepare_volume(self, obj):
    """
    Prepare the volume number; grab it from the varfields on this
    item record.
    """
    # Volume numbers are stored in v-tagged varfields on the item.
    return helpers.get_varfield_vals(
        obj.record_metadata.varfield_set.all(), 'v')
def get_call_number(self, obj):
    '''
    Determines the most specific call number on an item or the
    attached bib as well as the type of the call number. Returns a
    tuple: (call_number, cn_type). Type is determined based on our
    indexing rules for call numbers--item_cn_specs for items and
    bib_cn_specs for bibs. (See the BibRecord and ItemRecord models
    for details.) Returns (None, None) if no call number was found.
    '''
    cn_tuple = None
    bib = obj.bibrecorditemrecordlink_set.all()
    try:
        cn_tuple = obj.get_call_numbers()[0]
    except IndexError:
        # If there's no matching item call no, we try a bib call no.
        if bib:
            try:
                cn_tuple = bib[0].bib_record.get_call_numbers()[0]
            except IndexError:
                pass
    # Last: might have a periodical shelved by title. In that case,
    # we'll append the title to the end of the call no.
    item_vfs = obj.record_metadata.varfield_set.all()
    # Explicit parens (same precedence Python applied implicitly):
    # the heuristic fires for a periodical itype (5) with a missing or
    # trivially-short (<= 2 letters) call number, OR for any call
    # number that literally starts with "periodical".
    probably_shelved_by_title = (
        (obj.itype.code_num == 5
         and (not cn_tuple or re.search(r'^[A-Za-z]{,2}$', cn_tuple[0])))
        or (cn_tuple
            and re.search(r'^periodical', cn_tuple[0], re.IGNORECASE)))
    if not probably_shelved_by_title:
        # Fall back to an explicit "SHELVED BY TITLE" c-tagged field.
        for cn in helpers.get_varfield_vals(
                item_vfs, 'c', many=True,
                content_method='display_field_content'):
            if cn.strip().upper() == 'SHELVED BY TITLE':
                probably_shelved_by_title = True
                break
    if probably_shelved_by_title and bib:
        bib = bib[0].bib_record
        title = bib.bibrecordproperty_set.all()[0].best_title
        # Strip trailing periods/whitespace from the title.
        title = re.sub(r'\.*\s*$', r'', title)
        if cn_tuple:
            call_number = '{} -- {}'.format(cn_tuple[0], title)
        else:
            # Bug fix: the old code did cn_tuple[0] unconditionally and
            # raised TypeError when no call number existed at all (e.g.
            # itype 5 with no item or bib call number). Use the title
            # alone in that case.
            call_number = title
        cn_tuple = (call_number, 'other')
    return cn_tuple or (None, None)
def prepare_holdings(self, obj):
    # Collect a display title for each holding attached to this item;
    # record the parallel list of holding rec nums on self.h_lists,
    # keyed by this item's rec num.
    titles, holding_rec_nums = [], []
    item_rec_num = obj.record_metadata.get_iii_recnum(True)
    for holding in obj.holding_records.all():
        try:
            holding_rec_num = holding.record_metadata.get_iii_recnum(True)
            bib_vfs = (holding.bibrecord_set.all()[0]
                       .record_metadata.varfield_set.all())
        except IndexError:
            # Holding with no attached bib record: skip it.
            continue
        titles.append(helpers.get_varfield_vals(
            bib_vfs, 't', '245', cm_kw_params={'subfields': 'a'},
            content_method='display_field_content'))
        holding_rec_nums.append(holding_rec_num)
    self.h_lists[item_rec_num] = holding_rec_nums
    return titles
def prepare_holdings(self, obj):
    """
    Gather the display title of each holding record attached to this
    item; also stash the parallel list of holding rec nums in
    self.h_lists, keyed by the item's rec num.
    """
    rec_num = obj.record_metadata.get_iii_recnum(True)
    titles = []
    h_rec_nums = []
    for h in obj.holding_records.all():
        try:
            h_rec_num = h.record_metadata.get_iii_recnum(True)
            vfs = h.bibrecord_set.all()[0]\
                   .record_metadata.varfield_set.all()
        except IndexError:
            pass  # no bib attached to this holding -- nothing to show
        else:
            title = helpers.get_varfield_vals(
                vfs, 't', '245', cm_kw_params={'subfields': 'a'},
                content_method='display_field_content')
            titles.append(title)
            h_rec_nums.append(h_rec_num)
    self.h_lists[rec_num] = h_rec_nums
    return titles
def prepare_public_notes(self, obj):
    '''
    Prepare public_notes (or i-tagged varfields)
    '''
    # NOTE(review): the docstring mentions i-tagged fields but the
    # code pulls p-tagged ones -- confirm which tag is correct.
    item_varfields = obj.record_metadata.varfield_set.all()
    notes = helpers.get_varfield_vals(item_varfields, 'p', many=True)
    return notes
def prepare_long_messages(self, obj):
    '''
    Prepare the "long_messages" (or m-tagged varfields)
    '''
    item_varfields = obj.record_metadata.varfield_set.all()
    messages = helpers.get_varfield_vals(item_varfields, 'm', many=True)
    return messages
def export_records(self, records):
    '''
    Export holdings records to Solr by updating the eresource records
    they're attached to.

    Returns a dict: 'holdings' maps eresource rec nums to queued
    append/delete lists (committed to Solr/Redis later, by the
    callback); 'eresources' carries the eresource exporter's return
    value for any eresources not yet indexed.
    '''
    eresources, er_mapping = set(), {}
    # First we loop through the holding records and determine which
    # eresources need to be updated. er_mapping maps eresource rec
    # nums to the attached er_record plus a list of holdings data.
    rev_handler = redisobjs.RedisObject('reverse_holdings_list', '0')
    reverse_holdings_list = rev_handler.get() or {}
    for h in records:
        h_rec_num = h.record_metadata.get_iii_recnum(True)
        old_er_rec_num = reverse_holdings_list.get(h_rec_num, None)
        try:
            er_record = h.resourcerecord_set.all()[0]
        except IndexError:
            er_record, er_rec_num = None, None
        else:
            er_rec_num = er_record.record_metadata.get_iii_recnum(True)
        if old_er_rec_num and old_er_rec_num != er_rec_num:
            # if the current attached er rec_num in Sierra is
            # different than what's in Redis, then we need to
            # delete this holding from the old er record.
            # Bug fix: er_mapping values are dicts, so the holdings
            # list must be pulled out of the existing entry -- the old
            # code appended onto the dict itself (AttributeError) and
            # clobbered any previously recorded er_record with None.
            old_entry = er_mapping.get(old_er_rec_num, {})
            old_h_data = old_entry.get('holdings', [])
            old_h_data.append({
                'delete': True, 'rec_num': h_rec_num, 'title': None
            })
            er_mapping[old_er_rec_num] = {
                'er_record': old_entry.get('er_record'),
                'holdings': old_h_data
            }
        if er_rec_num:
            holdings = er_mapping.get(er_rec_num, {}).get('holdings', [])
            try:
                vf = h.bibrecord_set.all()[0].record_metadata\
                        .varfield_set.all()
            except IndexError:
                title = None
            else:
                title = helpers.get_varfield_vals(
                    vf, 't', '245', cm_kw_params={'subfields': 'a'},
                    content_method='display_field_content')
            data = {'delete': False, 'title': title,
                    'rec_num': h_rec_num}
            holdings.append(data)
            er_mapping[er_rec_num] = {
                'er_record': er_record, 'holdings': holdings
            }
    h_vals = {}
    # .items() instead of .iteritems() -- works on Python 2 and 3.
    for er_rec_num, entry in er_mapping.items():
        er_record, holdings = entry['er_record'], entry['holdings']
        # if we've already indexed the eresource this holding is
        # attached to, then we want to pull the record from Solr
        # and make whatever changes to it rather than reindex the
        # whole record and all attached holdings from scratch.
        # Since export jobs get broken up and run in parallel, we
        # want to hold off on actually committing to Solr and
        # updating Redis until the callback runs.
        s = solr.Queryset().filter(record_number=er_rec_num)
        if s.count() > 0:
            rec_queue = h_vals.get(er_rec_num, {})
            rec_append_list = rec_queue.get('append', [])
            rec_delete_list = rec_queue.get('delete', [])
            record = s[0]
            red = redisobjs.RedisObject('eresource_holdings_list',
                                        er_rec_num)
            red_h_list = red.get()
            for data in holdings:
                try:
                    red_h_index = red_h_list.index(data.get('rec_num'))
                except AttributeError:
                    # red_h_list was None: nothing in Redis yet.
                    self.log('Info', '{}'.format(data.get('rec_num')))
                except ValueError:
                    # Holding not yet on this eresource: queue append.
                    record.holdings.append(data.get('title'))
                    rec_append_list.append(data.get('rec_num'))
                else:
                    if data.get('delete'):
                        # we wait until the final callback to
                        # delete anything from Solr, because that
                        # will mess up our holdings index number
                        rec_delete_list.append(data.get('rec_num'))
                    else:
                        record.holdings[red_h_index] = data.get('title')
            record.save(commit=False)
            rec_queue['append'] = rec_append_list
            rec_queue['delete'] = rec_delete_list
            h_vals[er_rec_num] = rec_queue
        else:
            # if we haven't indexed the record already, we'll add
            # it using the Haystack indexer.
            eresources.add(er_record)
    # Bug fix: ret_er_vals was only bound inside the `if` below, so
    # the return raised NameError when no new eresources were found.
    ret_er_vals = None
    if eresources:
        ret_er_vals = self.eresources_to_solr.export_records(
            list(eresources))
    return {'holdings': h_vals, 'eresources': ret_er_vals}
def export_records(self, records, vals=None):
    '''
    Export holdings records, accumulating results into `vals` so this
    can be called repeatedly across parallel job chunks.

    Returns the updated `vals` dict: 'holdings' maps eresource rec
    nums to queued append/delete lists (committed to Solr/Redis later,
    by the callback); 'eresources' carries the eresource exporter's
    accumulated return value for eresources not yet indexed.
    '''
    # Bug fix: `vals={}` was a mutable default argument shared across
    # calls; use None and build a fresh dict per call instead.
    vals = {} if vals is None else vals
    eresources = set()
    er_mapping = {}
    # Bug fix: the old code called `eresources.add(e)` on an undefined
    # name. Keep the actual resource record for each er rec num here
    # so not-yet-indexed eresources can be handed to the indexer.
    er_records = {}
    # First we loop through the holding records and determine which
    # eresources need to be updated. er_mapping maps eresource rec
    # nums to lists of holdings rec nums to update.
    rev_handler = redisobjs.RedisObject('reverse_holdings_list', '0')
    reverse_holdings_list = rev_handler.get() or {}
    for h in records:
        h_rec_num = h.record_metadata.get_iii_recnum(True)
        old_er_rec_num = reverse_holdings_list.get(h_rec_num, None)
        try:
            er_record = h.resourcerecord_set.all()[0]
        except IndexError:
            er_rec_num = None
        else:
            er_rec_num = er_record.record_metadata.get_iii_recnum(True)
            er_records[er_rec_num] = er_record
        if old_er_rec_num and old_er_rec_num != er_rec_num:
            # if the current attached er rec_num in Sierra is
            # different than what's in Redis, then we need to
            # delete this holding from the old er record.
            old_h_data = er_mapping.get(old_er_rec_num, [])
            old_h_data.append({
                'delete': True, 'rec_num': h_rec_num, 'title': None
            })
            er_mapping[old_er_rec_num] = old_h_data
        if er_rec_num:
            holding_data = er_mapping.get(er_rec_num, [])
            try:
                vf = h.bibrecord_set.all()[0].record_metadata\
                        .varfield_set.all()
            except IndexError:
                title = None
            else:
                title = helpers.get_varfield_vals(
                    vf, 't', '245', cm_kw_params={'subfields': 'a'},
                    content_method='display_field_content')
            holding_data.append({
                'delete': False, 'title': title, 'rec_num': h_rec_num
            })
            er_mapping[er_rec_num] = holding_data
    h_vals = vals.get('holdings', {})
    # .items() instead of .iteritems() -- works on Python 2 and 3.
    for er_rec_num, holdings in er_mapping.items():
        # if we've already indexed the eresource this holding is
        # attached to, then we want to pull the record from Solr
        # and make whatever changes to it rather than reindex the
        # whole record and all attached holdings from scratch.
        # Since export jobs get broken up and run in parallel, we
        # want to hold off on actually committing to Solr and
        # updating Redis until the callback runs.
        s = solr.Queryset().filter(record_number=er_rec_num)
        if s.count() > 0:
            rec_queue = h_vals.get(er_rec_num, {})
            rec_append_list = rec_queue.get('append', [])
            rec_delete_list = rec_queue.get('delete', [])
            record = s[0]
            red = redisobjs.RedisObject('eresource_holdings_list',
                                        er_rec_num)
            red_h_list = red.get()
            for data in holdings:
                try:
                    red_h_index = red_h_list.index(data.get('rec_num'))
                except AttributeError:
                    # red_h_list was None: nothing in Redis yet.
                    self.log('Info', '{}'.format(data.get('rec_num')))
                except ValueError:
                    # Holding not yet on this eresource: queue append.
                    record.holdings.append(data.get('title'))
                    rec_append_list.append(data.get('rec_num'))
                else:
                    if data.get('delete'):
                        # we wait until the final callback to
                        # delete anything from Solr, because that
                        # will mess up our holdings index number
                        rec_delete_list.append(data.get('rec_num'))
                    else:
                        record.holdings[red_h_index] = data.get('title')
            record.save(commit=False)
            rec_queue['append'] = rec_append_list
            rec_queue['delete'] = rec_delete_list
            h_vals[er_rec_num] = rec_queue
        else:
            # if we haven't indexed the record already, we'll add
            # it using the Haystack indexer. (Delete-only entries for
            # detached eresources have no er_record to index.)
            er_record = er_records.get(er_rec_num)
            if er_record is not None:
                eresources.add(er_record)
    vals['holdings'] = h_vals
    if eresources:
        er_vals = vals.get('eresources', {})
        er_vals.update(
            self.eresources_to_solr(self.instance.pk, self.export_filter,
                                    self.export_type, self.options)
                .export_records(list(eresources), er_vals))
        vals['eresources'] = er_vals
    return vals
def prepare_public_notes(self, obj):
    """
    Prepare public_notes (or i-tagged varfields)
    """
    # NOTE(review): the docstring mentions i-tagged fields but the
    # code pulls p-tagged ones -- confirm which tag is correct.
    return helpers.get_varfield_vals(
        obj.record_metadata.varfield_set.all(), 'p', many=True)
def prepare_long_messages(self, obj):
    """
    Prepare the "long_messages" (or m-tagged varfields)
    """
    # Long messages are stored in m-tagged varfields on the item.
    return helpers.get_varfield_vals(
        obj.record_metadata.varfield_set.all(), 'm', many=True)