def perform_upload_check(xml_record, mode):
    """Perform an upload simulation with the given record and mode.

    BibUpload's ``write_message`` is temporarily monkey-patched so that
    its non-"DONE" messages are collected instead of printed; the basic
    upload checks are run first, then a ``pretend=True`` upload for each
    parsed record.

    @param xml_record: MARCXML string to check
    @param mode: upload mode flag; the first two characters are stripped
        before use (e.g. "-r" -> "r")
    @return: string describing errors (empty string if none)
    @rtype: string
    """
    error_cache = []

    def my_writer(msg, stream=sys.stdout, verbose=1):
        # Capture messages instead of printing them; "DONE" lines are
        # normal progress output, not errors.
        if verbose == 1:
            if 'DONE' not in msg:
                error_cache.append(msg.strip())

    orig_writer = bibupload_module.write_message
    bibupload_module.write_message = my_writer
    # BUGFIX: the original restored the writer only around the pretend
    # uploads, so the early return after the basic checks leaked the
    # patched writer.  One try/finally now covers every exit path.
    try:
        error_cache.extend(perform_basic_upload_checks(xml_record))
        if error_cache:
            # There has been some critical error
            return '\n'.join(error_cache)

        recs = xml_marc_to_records(xml_record)
        upload_mode = mode[2:]
        # Adapt input data for bibupload function
        if upload_mode == "r insert-or-replace":
            upload_mode = "replace_or_insert"
        for record in recs:
            if record:
                record_strip_empty_volatile_subfields(record)
                record_strip_empty_fields(record)
                bibupload(record, opt_mode=upload_mode, pretend=True)
    finally:
        bibupload_module.write_message = orig_writer
    return '\n'.join(error_cache)
def save_xml_record(recid, uid, xml_record='', to_upload=True, to_merge=False,
                    spec_name=''):
    """Write XML record to file. Default behaviour is to read the
    record from a BibEdit cache file, filter out the unchanged
    volatile subfields, write it back to an XML file and then pass this
    file to BibUpload.

    @param xml_record: give XML as string in stead of reading cache file
    @param to_upload: pass the XML file to BibUpload
    @param to_merge: prepare an XML file for BibMerge to use
    @param spec_name: if non-empty, forwarded to BibUpload as the task
        specific name (``-N`` option)
    @return: True
    """
    if not xml_record:
        # Read record from cache file.
        cache = get_cache_file_contents(recid, uid)
        if cache:
            record = cache[2]
            used_changes = cache[4]
            # record_strip_empty_fields(record)  # now performed for every
            # record after removing unfilled volatile fields
            xml_record = record_xml_output(record)
            delete_cache_file(recid, uid)
            delete_disabled_changes(used_changes)
    else:
        record = create_record(xml_record)[0]
    # clean the record from unfilled volatile fields
    record_strip_empty_volatile_subfields(record)
    record_strip_empty_fields(record)
    # order subfields alphabetically before saving the record
    # TP: we do not want this (original note said "nechceme") --
    # NOTE(review): the call is still live; confirm intent before removing
    record_order_subfields(record)
    xml_to_write = wash_for_xml(record_xml_output(record))
    # Write XML file.
    if not to_merge:
        file_path = '%s.xml' % _get_file_path(recid, uid)
    else:
        file_path = '%s_%s.xml' % (_get_file_path(recid, uid),
                                   CFG_BIBEDIT_TO_MERGE_SUFFIX)
    # Context manager guarantees the handle is closed even if write fails
    # (the original open/write/close leaked it on a write error).
    with open(file_path, 'w') as xml_file:
        xml_file.write(xml_to_write)
    user_name = get_user_info(uid)[1]
    if to_upload:
        # TP: check whether to add spec name
        if spec_name == '':
            # Pass XML file to BibUpload.
            task_low_level_submission('bibupload', 'bibedit', '-P', '5',
                                      '-r', file_path, '-u', user_name)
        else:
            task_low_level_submission('bibupload', 'bibedit', '-P', '5',
                                      '-r', file_path, '-u', user_name,
                                      '-N', spec_name)
    return True
def save_xml_record(recid, uid, xml_record='', to_upload=True, to_merge=False,
                    task_name="bibedit", sequence_id=None):
    """Write XML record to file. Default behaviour is to read the
    record from a BibEdit cache file, filter out the unchanged
    volatile subfields, write it back to an XML file and then pass this
    file to BibUpload.

    @param xml_record: give XML as string in stead of reading cache file
    @param to_upload: pass the XML file to BibUpload
    @param to_merge: prepare an XML file for BibMerge to use
    @param task_name: when "bibedit", the upload task is submitted with
        ``--name bibedit``
    @param sequence_id: optional sequence id forwarded to BibUpload (-I)
    @return: True
    """
    if not xml_record:
        # Read record from cache file.
        cache = get_cache_contents(recid, uid)
        if cache:
            record = cache[2]
            used_changes = cache[4]
            xml_record = record_xml_output(record)
            delete_cache(recid, uid)
            delete_disabled_changes(used_changes)
    else:
        record = create_record(xml_record)[0]
    # clean the record from unfilled volatile fields
    record_strip_empty_volatile_subfields(record)
    record_strip_empty_fields(record)
    # order subfields alphabetically before saving the record
    record_order_subfields(record)
    xml_to_write = wash_for_xml(record_xml_output(record))
    # Write XML file.
    if not to_merge:
        # Unique temp file so concurrent saves cannot clobber each other.
        fd, file_path = tempfile.mkstemp(dir=CFG_BIBEDIT_CACHEDIR,
                                         prefix="%s_" % CFG_BIBEDIT_FILENAME,
                                         suffix="_%s_%s.xml" % (recid, uid))
        # Context manager closes the descriptor even if the write fails
        # (the original fdopen/write/close leaked it on a write error).
        with os.fdopen(fd, 'w') as xml_file:
            xml_file.write(xml_to_write)
    else:
        file_path = '%s_%s.xml' % (_get_file_path(recid, uid),
                                   CFG_BIBEDIT_TO_MERGE_SUFFIX)
        with open(file_path, 'w') as xml_file:
            xml_file.write(xml_to_write)
    user_name = get_user_info(uid)[1]
    if to_upload:
        # Pass XML file to BibUpload.
        args = ['bibupload', user_name, '-P', '5', '-r', file_path,
                '-u', user_name]
        if task_name == "bibedit":
            args.extend(['--name', 'bibedit'])
        if sequence_id:
            args.extend(["-I", sequence_id])
        args.append('--email-logs-on-error')
        task_low_level_submission(*args)
    return True
def save_xml_record(recid, uid, xml_record='', to_upload=True, to_merge=False,
                    spec_name=''):
    """Write XML record to file. Default behaviour is to read the
    record from a BibEdit cache file, filter out the unchanged
    volatile subfields, write it back to an XML file and then pass this
    file to BibUpload.

    @param xml_record: give XML as string in stead of reading cache file
    @param to_upload: pass the XML file to BibUpload
    @param to_merge: prepare an XML file for BibMerge to use
    @param spec_name: if non-empty, forwarded to BibUpload as the task
        specific name (``-N`` option)
    @return: True
    """
    if not xml_record:
        # Read record from cache file.
        cache = get_cache_file_contents(recid, uid)
        if cache:
            record = cache[2]
            used_changes = cache[4]
            # record_strip_empty_fields(record)  # now performed for every
            # record after removing unfilled volatile fields
            xml_record = record_xml_output(record)
            delete_cache_file(recid, uid)
            delete_disabled_changes(used_changes)
    else:
        record = create_record(xml_record)[0]
    # clean the record from unfilled volatile fields
    record_strip_empty_volatile_subfields(record)
    record_strip_empty_fields(record)
    # order subfields alphabetically before saving the record
    # TP: we do not want this (original note said "nechceme") --
    # NOTE(review): the call is still live; confirm intent before removing
    record_order_subfields(record)
    xml_to_write = wash_for_xml(record_xml_output(record))
    # Write XML file.
    if not to_merge:
        file_path = '%s.xml' % _get_file_path(recid, uid)
    else:
        file_path = '%s_%s.xml' % (_get_file_path(recid, uid),
                                   CFG_BIBEDIT_TO_MERGE_SUFFIX)
    # Context manager guarantees the handle is closed even if write fails
    # (the original open/write/close leaked it on a write error).
    with open(file_path, 'w') as xml_file:
        xml_file.write(xml_to_write)
    user_name = get_user_info(uid)[1]
    if to_upload:
        # TP: check whether to add spec name
        if spec_name == '':
            # Pass XML file to BibUpload.
            task_low_level_submission('bibupload', 'bibedit', '-P', '5',
                                      '-r', file_path, '-u', user_name)
        else:
            task_low_level_submission('bibupload', 'bibedit', '-P', '5',
                                      '-r', file_path, '-u', user_name,
                                      '-N', spec_name)
    return True
def add_record_cnum(recid, uid):
    """Ensure the record carries a cnum, generating one when missing.

    @param recid: recid of the record under check. Used to retrieve cache file
    @type recid: int
    @param uid: id of the user. Used to retrieve cache file
    @type uid: int
    @return: None if cnum already present, new cnum otherwise
    @rtype: None or string
    """
    # Import placed here to avoid circular dependency
    from invenio.sequtils_cnum import CnumSeq, ConferenceNoStartDateError

    (record_revision, record, pending_changes, deactivated_hp_changes,
     undo_list, redo_list) = get_cache_file_contents(recid, uid)[1:]
    record_strip_empty_volatile_subfields(record)

    # Nothing to do when 111__g already holds a cnum.
    if record_get_field_value(record, "111", " ", " ", "g"):
        return None

    try:
        new_cnum = CnumSeq().next_value(
            xml_record=wash_for_xml(print_rec(record)))
    except ConferenceNoStartDateError:
        return None

    # Attach the generated cnum to the first 111 field and persist the
    # updated record back into the cache.
    field_add_subfield(record['111'][0], 'g', new_cnum)
    update_cache_file_contents(recid, uid, record_revision, record,
                               pending_changes, deactivated_hp_changes,
                               undo_list, redo_list)
    return new_cnum
def add_record_cnum(recid, uid):
    """Ensure the record carries a cnum, generating one when missing.

    @param recid: recid of the record under check. Used to retrieve cache file
    @type recid: int
    @param uid: id of the user. Used to retrieve cache file
    @type uid: int
    @return: None if cnum already present, new cnum otherwise
    @rtype: None or string
    """
    # Import placed here to avoid circular dependency
    from invenio.sequtils_cnum import CnumSeq, ConferenceNoStartDateError

    (record_revision, record, pending_changes, deactivated_hp_changes,
     undo_list, redo_list) = get_cache_contents(recid, uid)[1:]
    record_strip_empty_volatile_subfields(record)

    # Nothing to do when 111__g already holds a cnum.
    if record_get_field_value(record, "111", " ", " ", "g"):
        return None

    try:
        new_cnum = CnumSeq().next_value(
            xml_record=wash_for_xml(print_rec(record)))
    except ConferenceNoStartDateError:
        return None

    # Attach the generated cnum to the first 111 field and persist the
    # updated record back into the cache.
    field_add_subfield(record['111'][0], 'g', new_cnum)
    update_cache_contents(recid, uid, record_revision, record,
                          pending_changes, deactivated_hp_changes,
                          undo_list, redo_list)
    return new_cnum
def save_xml_record(recid, uid, xml_record='', to_upload=True, to_merge=False,
                    task_name="bibedit", sequence_id=None):
    """Write XML record to file. Default behaviour is to read the
    record from a BibEdit cache file, filter out the unchanged
    volatile subfields, write it back to an XML file and then pass this
    file to BibUpload.

    @param xml_record: give XML as string in stead of reading cache file
    @param to_upload: pass the XML file to BibUpload
    @param to_merge: prepare an XML file for BibMerge to use
    @param task_name: when "bibedit", the upload task is submitted with
        ``--name bibedit``
    @param sequence_id: optional sequence id forwarded to BibUpload (-I)
    @return: True
    """
    if not xml_record:
        # Read record from cache file.
        cache = get_cache_contents(recid, uid)
        if cache:
            record = cache[2]
            used_changes = cache[4]
            xml_record = record_xml_output(record)
            delete_cache(recid, uid)
            delete_disabled_changes(used_changes)
    else:
        record = create_record(xml_record)[0]
    # clean the record from unfilled volatile fields
    record_strip_empty_volatile_subfields(record)
    record_strip_empty_fields(record)
    # order subfields alphabetically before saving the record
    record_order_subfields(record)
    xml_to_write = wash_for_xml(record_xml_output(record))
    # Write XML file.
    if not to_merge:
        # Unique temp file so concurrent saves cannot clobber each other.
        fd, file_path = tempfile.mkstemp(dir=CFG_BIBEDIT_CACHEDIR,
                                         prefix="%s_" % CFG_BIBEDIT_FILENAME,
                                         suffix="_%s_%s.xml" % (recid, uid))
        # Context manager closes the descriptor even if the write fails
        # (the original fdopen/write/close leaked it on a write error).
        with os.fdopen(fd, 'w') as xml_file:
            xml_file.write(xml_to_write)
    else:
        file_path = '%s_%s.xml' % (_get_file_path(recid, uid),
                                   CFG_BIBEDIT_TO_MERGE_SUFFIX)
        with open(file_path, 'w') as xml_file:
            xml_file.write(xml_to_write)
    user_name = get_user_info(uid)[1]
    if to_upload:
        # Pass XML file to BibUpload.
        args = ['bibupload', user_name, '-P', '5', '-r', file_path,
                '-u', user_name]
        if task_name == "bibedit":
            args.extend(['--name', 'bibedit'])
        if sequence_id:
            args.extend(["-I", sequence_id])
        args.append('--email-logs-on-error')
        task_low_level_submission(*args)
    return True