def perform_small_request_update_record(requestType, uid, data):
    """Handle record update requests for actions on a subfield level.
    Handle adding, replacing or deleting of subfields.
    """
    result = {'resultCode': 0, 'resultText': '', 'resultHtml': ''}
    master_recid = data["recID1"]
    slave_recid = data["recID2"]
    #TODO: check mtime, existence
    (cache_dirty, rec_revision, master_record, pending_changes,
     disabled_hp_changes) = get_cache_file_contents(master_recid, uid)

    slave_record = _get_record_slave(slave_recid, result,
                                     data['record2Mode'], uid)
    if result['resultCode'] != 0:
        # slave record not accessible -- propagate the error information
        return result

    ftag, findex1 = _field_info(data['fieldCode1'])
    fnum = ftag[:3]
    findex2 = _field_info(data['fieldCode2'])[1]
    sfindex1 = data['sfindex1']
    sfindex2 = data['sfindex2']

    if requestType == 'deleteSubfield':
        delete_subfield(master_record, fnum, findex1, sfindex1)
        result['resultText'] = 'Subfield deleted'
    elif requestType == 'addSubfield':
        add_subfield(master_record, slave_record, fnum, findex1, findex2,
                     sfindex1, sfindex2)
        result['resultText'] = 'Subfield added'
    elif requestType == 'replaceSubfield':
        replace_subfield(master_record, slave_record, fnum, findex1, findex2,
                         sfindex1, sfindex2)
        result['resultText'] = 'Subfield replaced'
    elif requestType == 'diffSubfield':
        result['resultHtml'] = bibmerge_templates.BM_html_subfield_row_diffed(
            master_record, slave_record, fnum, findex1, findex2,
            sfindex1, sfindex2)
        result['resultText'] = 'Subfields diffed'

    # Persist the (possibly) modified master record; undo/redo lists are
    # cleared, as in the original behaviour.
    update_cache_file_contents(master_recid, uid, rec_revision, master_record,
                               pending_changes, disabled_hp_changes, [], [])
    return result
def _get_record(recid, uid, result, fresh_record=False):
    """Retrieve record structure.
    """
    record = None
    mtime = None
    cache_dirty = None
    record_status = record_exists(recid)
    has_cache = cache_exists(recid, uid)

    # Determine the error message, if any, before touching the cache.
    # The checks are evaluated in the same order as before so that the
    # same side-effect-free predicates are called.
    error = None
    if record_status == 0:
        error = 'Non-existent record: %s' % recid
    elif record_status == -1:
        error = 'Deleted record: %s' % recid
    elif not has_cache and record_locked_by_other_user(recid, uid):
        error = 'Record %s locked by user' % recid
    elif has_cache and cache_expired(recid, uid) and \
            record_locked_by_other_user(recid, uid):
        error = 'Record %s locked by user' % recid
    elif record_locked_by_queue(recid):
        error = 'Record %s locked by queue' % recid

    if error is not None:
        result['resultCode'], result['resultText'] = 1, error
        return record

    if fresh_record:
        delete_cache_file(recid, uid)
        has_cache = False
    if not has_cache:
        record_revision, record = create_cache_file(recid, uid)
        mtime = get_cache_mtime(recid, uid)
        cache_dirty = False
    else:
        cached = get_cache_file_contents(recid, uid)
        cache_dirty, record_revision, record = cached[0], cached[1], cached[2]
        touch_cache_file(recid, uid)
        mtime = get_cache_mtime(recid, uid)
        if not latest_record_revision(recid, record_revision):
            result['cacheOutdated'] = True
    result['resultCode'] = 0
    result['resultText'] = 'Record OK'
    result['cacheDirty'] = cache_dirty
    result['cacheMTime'] = mtime
    return record
def perform_small_request_update_record(requestType, uid, data):
    """Handle record update requests for actions on a subfield level.
    Handle adding, replacing or deleting of subfields.
    """
    result = {'resultCode': 0, 'resultText': '', 'resultHtml': ''}
    recid1 = data["recID1"]
    recid2 = data["recID2"]
    #TODO: check mtime, existence
    cached = get_cache_file_contents(recid1, uid)
    cache_dirty = cached[0]
    rec_revision = cached[1]
    record1 = cached[2]
    pending_changes = cached[3]
    disabled_hp_changes = cached[4]

    record2 = _get_record_slave(recid2, result, data['record2Mode'], uid)
    if result['resultCode'] != 0:
        # record not accessible -- return the error information
        return result

    field_tag, findex1 = _field_info(data['fieldCode1'])
    fnum = field_tag[:3]
    findex2 = _field_info(data['fieldCode2'])[1]
    sfindex1 = data['sfindex1']
    sfindex2 = data['sfindex2']

    if requestType == 'deleteSubfield':
        delete_subfield(record1, fnum, findex1, sfindex1)
        result['resultText'] = 'Subfield deleted'
    elif requestType == 'addSubfield':
        add_subfield(record1, record2, fnum, findex1, findex2,
                     sfindex1, sfindex2)
        result['resultText'] = 'Subfield added'
    elif requestType == 'replaceSubfield':
        replace_subfield(record1, record2, fnum, findex1, findex2,
                         sfindex1, sfindex2)
        result['resultText'] = 'Subfield replaced'
    elif requestType == 'diffSubfield':
        result['resultHtml'] = bibmerge_templates.BM_html_subfield_row_diffed(
            record1, record2, fnum, findex1, findex2, sfindex1, sfindex2)
        result['resultText'] = 'Subfields diffed'

    # Write the modified master record back to the cache.
    update_cache_file_contents(recid1, uid, rec_revision, record1,
                               pending_changes, disabled_hp_changes)
    return result
def _get_record(recid, uid, result, fresh_record=False):
    """Retrieve record structure.
    """
    record_status = record_exists(recid)
    existing_cache = cache_exists(recid, uid)

    def _fail(message):
        # Every failure branch reports resultCode 1 plus a message.
        result["resultCode"], result["resultText"] = 1, message
        return None

    if record_status == 0:
        return _fail("Non-existent record: %s" % recid)
    if record_status == -1:
        return _fail("Deleted record: %s" % recid)
    if not existing_cache and record_locked_by_other_user(recid, uid):
        return _fail("Record %s locked by user" % recid)
    if existing_cache and cache_expired(recid, uid) \
            and record_locked_by_other_user(recid, uid):
        return _fail("Record %s locked by user" % recid)
    if record_locked_by_queue(recid):
        return _fail("Record %s locked by queue" % recid)

    if fresh_record:
        delete_cache_file(recid, uid)
        existing_cache = False

    if not existing_cache:
        record_revision, record = create_cache_file(recid, uid)
        mtime = get_cache_mtime(recid, uid)
        cache_dirty = False
    else:
        cache_dirty, record_revision, record = \
            get_cache_file_contents(recid, uid)[:3]
        touch_cache_file(recid, uid)
        mtime = get_cache_mtime(recid, uid)
        if not latest_record_revision(recid, record_revision):
            result["cacheOutdated"] = True

    result["resultCode"], result["resultText"] = 0, "Record OK"
    result["cacheDirty"], result["cacheMTime"] = cache_dirty, mtime
    return record
def _get_record(recid, uid, result, fresh_record=False):
    """Retrieve record structure.
    """
    record = None
    mtime = None
    cache_dirty = None
    record_status = record_exists(recid)
    existing_cache = cache_exists(recid, uid)

    # Lazily evaluated guards: the first predicate that fires wins, and
    # later (potentially expensive) lock checks are never called.
    guards = (
        (lambda: record_status == 0,
         'Non-existent record: %s' % recid),
        (lambda: record_status == -1,
         'Deleted record: %s' % recid),
        (lambda: not existing_cache and
                 record_locked_by_other_user(recid, uid),
         'Record %s locked by user' % recid),
        (lambda: existing_cache and cache_expired(recid, uid) and
                 record_locked_by_other_user(recid, uid),
         'Record %s locked by user' % recid),
        (lambda: record_locked_by_queue(recid),
         'Record %s locked by queue' % recid),
    )
    for predicate, message in guards:
        if predicate():
            result['resultCode'], result['resultText'] = 1, message
            return record

    if fresh_record:
        delete_cache_file(recid, uid)
        existing_cache = False
    if existing_cache:
        cache_dirty, record_revision, record, _, _ = \
            get_cache_file_contents(recid, uid)
        touch_cache_file(recid, uid)
        mtime = get_cache_mtime(recid, uid)
        if not latest_record_revision(recid, record_revision):
            result['cacheOutdated'] = True
    else:
        record_revision, record = create_cache_file(recid, uid)
        mtime = get_cache_mtime(recid, uid)
        cache_dirty = False
    result['resultCode'], result['resultText'] = 0, 'Record OK'
    result['cacheDirty'], result['cacheMTime'] = cache_dirty, mtime
    return record
def perform_request_update_record(requestType, uid, data):
    """Handle record update requests for actions on a field level.
    Handle merging, adding, or replacing of fields.

    Returns a dict with 'resultCode' (0 on success, 1 on error),
    'resultText', and for modifying requests also 'resultHtml' with the
    re-rendered field group.
    """
    result = {'resultCode': 0, 'resultText': ''}
    recid1 = data["recID1"]
    recid2 = data["recID2"]
    record_content = get_cache_file_contents(recid1, uid)
    cache_dirty = record_content[0]
    rec_revision = record_content[1]
    record1 = record_content[2]
    pending_changes = record_content[3]
    disabled_hp_changes = record_content[4]
    # We will not be able to Undo/Redo correctly after any modifications
    # from the level of bibmerge are performed ! We clear all the undo/redo
    # lists
    undo_list = []
    redo_list = []

    mode = data['record2Mode']
    record2 = _get_record_slave(recid2, result, mode, uid)
    if result['resultCode'] != 0:
        # if record not accessible return error information
        return result

    if requestType == 'getFieldGroup':
        result['resultHtml'] = bibmerge_templates.BM_html_field_group(
            record1, record2, data['fieldTag'])
        result['resultText'] = 'Field group retrieved'
        return result
    elif requestType == 'getFieldGroupDiff':
        result['resultHtml'] = bibmerge_templates.BM_html_field_group(
            record1, record2, data['fieldTag'], True)
        result['resultText'] = 'Fields compared'
        return result
    elif requestType in ('mergeFieldGroup', 'mergeNCFieldGroup'):
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data['fieldTag'])
        # The boolean argument toggles the merge mode; it is False for the
        # 'NC' variant of the request, True otherwise.
        merge_field_group(record1, record2, fnum, ind1, ind2,
                          requestType != 'mergeNCFieldGroup')
        resultText = 'Field group merged'

    elif requestType in ('replaceField', 'addField'):
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data['fieldTag'])
        findex1 = _field_info(data['fieldCode1'])[1]
        findex2 = _field_info(data['fieldCode2'])[1]
        if findex2 is None:  # identity test per PEP 8, was '== None'
            result['resultCode'], result[
                'resultText'] = 1, 'No value in the selected field'
            return result
        if requestType == 'replaceField':
            replace_field(record1, record2, fnum, findex1, findex2)
            resultText = 'Field replaced'
        else:  # requestType == 'addField'
            add_field(record1, record2, fnum, findex1, findex2)
            resultText = 'Field added'

    elif requestType == 'deleteField':
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data['fieldTag'])
        findex1 = _field_info(data['fieldCode1'])[1]
        if findex1 is None:
            result['resultCode'], result[
                'resultText'] = 1, 'No value in the selected field'
            return result
        delete_field(record1, fnum, findex1)
        resultText = 'Field deleted'

    elif requestType == 'mergeField':
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data['fieldTag'])
        findex1 = _field_info(data['fieldCode1'])[1]
        findex2 = _field_info(data['fieldCode2'])[1]
        if findex2 is None:
            result['resultCode'], result[
                'resultText'] = 1, 'No value in the selected field'
            return result
        merge_field(record1, record2, fnum, findex1, findex2)
        resultText = 'Field merged'

    else:
        result['resultCode'], result['resultText'] = 1, 'Wrong request type'
        return result

    # Re-render the affected field group and persist the modified record.
    result['resultHtml'] = bibmerge_templates.BM_html_field_group(
        record1, record2, data['fieldTag'])
    result['resultText'] = resultText
    update_cache_file_contents(recid1, uid, rec_revision, record1,
                               pending_changes, disabled_hp_changes, undo_list,
                               redo_list)
    return result
def perform_request_update_record(requestType, uid, data):
    """Handle record update requests for actions on a field level.
    Handle merging, adding, or replacing of fields.

    Returns a dict with 'resultCode' (0 on success, 1 on error),
    'resultText', and for modifying requests also 'resultHtml' with the
    re-rendered field group.
    """
    result = {
        'resultCode': 0,
        'resultText': ''
        }
    recid1 = data["recID1"]
    recid2 = data["recID2"]
    record_content = get_cache_file_contents(recid1, uid)
    cache_dirty = record_content[0]
    rec_revision = record_content[1]
    record1 = record_content[2]
    pending_changes = record_content[3]
    disabled_hp_changes = record_content[4]

    mode = data['record2Mode']
    record2 = _get_record_slave(recid2, result, mode, uid)
    if result['resultCode'] != 0: #if record not accessible return error information
        return result

    if requestType == 'getFieldGroup':
        result['resultHtml'] = bibmerge_templates.BM_html_field_group(record1, record2, data['fieldTag'])
        result['resultText'] = 'Field group retrieved'
        return result
    elif requestType == 'getFieldGroupDiff':
        result['resultHtml'] = bibmerge_templates.BM_html_field_group(record1, record2, data['fieldTag'], True)
        result['resultText'] = 'Fields compared'
        return result
    elif requestType in ('mergeFieldGroup', 'mergeNCFieldGroup'):
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data['fieldTag'])
        # The boolean argument toggles the merge mode; it is False for the
        # 'NC' variant of the request, True otherwise.
        merge_field_group(record1, record2, fnum, ind1, ind2,
                          requestType != 'mergeNCFieldGroup')
        resultText = 'Field group merged'

    elif requestType in ('replaceField', 'addField'):
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data['fieldTag'])
        findex1 = _field_info(data['fieldCode1'])[1]
        findex2 = _field_info(data['fieldCode2'])[1]
        if findex2 is None:  # identity test per PEP 8, was '== None'
            result['resultCode'], result['resultText'] = 1, 'No value in the selected field'
            return result
        if requestType == 'replaceField':
            replace_field(record1, record2, fnum, findex1, findex2)
            resultText = 'Field replaced'
        else: # requestType == 'addField'
            add_field(record1, record2, fnum, findex1, findex2)
            resultText = 'Field added'

    elif requestType == 'deleteField':
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data['fieldTag'])
        findex1 = _field_info(data['fieldCode1'])[1]
        if findex1 is None:
            result['resultCode'], result['resultText'] = 1, 'No value in the selected field'
            return result
        delete_field(record1, fnum, findex1)
        resultText = 'Field deleted'

    elif requestType == 'mergeField':
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data['fieldTag'])
        findex1 = _field_info(data['fieldCode1'])[1]
        findex2 = _field_info(data['fieldCode2'])[1]
        if findex2 is None:
            result['resultCode'], result['resultText'] = 1, 'No value in the selected field'
            return result
        merge_field(record1, record2, fnum, findex1, findex2)
        resultText = 'Field merged'

    else:
        result['resultCode'], result['resultText'] = 1, 'Wrong request type'
        return result

    # Re-render the affected field group and persist the modified record.
    result['resultHtml'] = bibmerge_templates.BM_html_field_group(record1, record2, data['fieldTag'])
    result['resultText'] = resultText
    update_cache_file_contents(recid1, uid, rec_revision, record1, pending_changes, disabled_hp_changes)
    return result
def perform_request_update_record(request_type, recid, uid, cacheMTime, data, changeApplied, isBulk=False):
    """Handle record update requests like adding, modifying, moving or deleting
    of fields or subfields. Possible common error situations:
    - Missing cache file
    - Cache file modified in other editor

    request_type -- name of the action to perform (see branches below)
    recid -- id of the record being edited
    uid -- id of the user owning the cache file
    cacheMTime -- cache modification time last seen by the client; used to
        detect edits made concurrently in another editor
    changeApplied -- index of the pending change just applied, or -1 for none
    isBulk -- True when part of a bulk request (skips the mtime check)
    Returns a dict with 'resultCode'; on success also 'cacheMTime' and
    'cacheDirty'.
    """

    response = {}

    if not cache_exists(recid, uid):
        # Missing cache file.
        response['resultCode'] = 106
    elif not get_cache_mtime(recid, uid) == cacheMTime and isBulk == False:
        # In case of a bulk request, the changes are deliberately performed
        # immediately one after another, so the mtime check is skipped.
        response['resultCode'] = 107
    else:
        try:
            record_revision, record, pending_changes, desactivated_hp_changes = get_cache_file_contents(recid, uid)[1:]
        except:
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['wrong_cache_file_format']
            return response;

        # Drop the pending change that has just been applied, if any.
        if changeApplied != -1:
            pending_changes = pending_changes[:changeApplied] + pending_changes[changeApplied+1:]

        field_position_local = data.get('fieldPosition')
        if field_position_local is not None:
            field_position_local = int(field_position_local)
        if request_type == 'overrideChangesList':
            pending_changes = data['newChanges']
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['editor_modifications_changed']
        elif request_type == 'removeChange':
            #the change is removed automatically by passing the changeApplied parameter
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['editor_modifications_changed']
        elif request_type == 'desactivateHoldingPenChangeset':
            # the changeset has been marked as processed ( user applied it in the editor)
            # marking as used in the cache file
            # CAUTION: This function has been implemented here because logically it fits
            #          with the modifications made to the cache file. No changes are made to the
            #          Holding Pen physically. The changesets are related to the cache because
            #          we want to cancel the removal every time the cache disappears for any reason
            desactivated_hp_changes[data.get('desactivatedChangeset')] = True;
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['disabled_hp_changeset']
        elif request_type == 'addField':
            # Control fields carry a single value; data fields carry
            # indicators plus a list of subfields.
            if data['controlfield']:
                record_add_field(record, data['tag'],
                                 controlfield_value=data['value'])
                response['resultCode'] = 20
            else:
                record_add_field(record, data['tag'], data['ind1'],
                                 data['ind2'], subfields=data['subfields'],
                                 field_position_local=field_position_local)
                response['resultCode'] = 21

        elif request_type == 'addSubfields':
            subfields = data['subfields']
            for subfield in subfields:
                record_add_subfield_into(record, data['tag'], subfield[0],
                    subfield[1], subfield_position=None,
                    field_position_local=field_position_local)
            # Result code distinguishes single vs. multiple subfields added.
            if len(subfields) == 1:
                response['resultCode'] = 22
            else:
                response['resultCode'] = 23
        elif request_type == 'modifyField': # changing the field structure
            # first remove subfields and then add new... change the indices
            subfields = data['subFields'] # parse the JSON representation of the subfields here

            new_field = create_field(subfields, data['ind1'], data['ind2']);
            record_replace_field(record, data['tag'], new_field, field_position_local = data['fieldPosition'])
            response['resultCode'] = 26
            #response['debuggingValue'] = data['subFields'];

        elif request_type == 'modifyContent':
            # A subfield index selects a subfield edit; otherwise the tag
            # refers to a control field.
            if data['subfieldIndex'] != None:
                record_modify_subfield(record, data['tag'],
                    data['subfieldCode'], data['value'],
                    int(data['subfieldIndex']),
                    field_position_local=field_position_local)
            else:
                record_modify_controlfield(record, data['tag'], data["value"],
                  field_position_local=field_position_local)
            response['resultCode'] = 24

        elif request_type == 'moveSubfield':
            record_move_subfield(record, data['tag'],
                int(data['subfieldIndex']), int(data['newSubfieldIndex']),
                field_position_local=field_position_local)
            response['resultCode'] = 25

        elif request_type == 'moveField':
            if data['direction'] == 'up':
                final_position_local = field_position_local-1
            else: # direction is 'down'
                final_position_local = field_position_local+1
            record_move_fields(record, data['tag'], [field_position_local],
                final_position_local)
            response['resultCode'] = 32

        elif request_type == 'deleteFields':
            to_delete = data['toDelete']
            deleted_fields = 0
            deleted_subfields = 0
            for tag in to_delete:
                # Sorting the fields in a decreasing order by the local position!
                # NOTE(review): .keys() returning a sortable list and
                # list.sort(cmp) are Python 2 only.
                fieldsOrder = to_delete[tag].keys()
                fieldsOrder.sort(lambda a,b: int(b)-int(a))
                for field_position_local in fieldsOrder:
                    if not to_delete[tag][field_position_local]:
                        # No subfields specified - delete entire field.
                        record_delete_field(record, tag,
                            field_position_local=int(field_position_local))
                        deleted_fields += 1
                    else:
                        for subfield_position in \
                                to_delete[tag][field_position_local][::-1]:
                            # Delete subfields in reverse order (to keep the
                            # indexing correct).
                            record_delete_subfield_from(record, tag,
                                int(subfield_position),
                                field_position_local=int(field_position_local))
                            deleted_subfields += 1
            # Result code encodes what was deleted: single/multiple fields,
            # single/multiple subfields, or a mix.
            if deleted_fields == 1 and deleted_subfields == 0:
                response['resultCode'] = 26
            elif deleted_fields and deleted_subfields == 0:
                response['resultCode'] = 27
            elif deleted_subfields == 1 and deleted_fields == 0:
                response['resultCode'] = 28
            elif deleted_subfields and deleted_fields == 0:
                response['resultCode'] = 29
            else:
                response['resultCode'] = 30
        # Persist the modified record and report the new cache mtime so the
        # client can keep its concurrency token up to date.
        response['cacheMTime'], response['cacheDirty'] = \
            update_cache_file_contents(recid, uid, record_revision, record, \
                                       pending_changes, desactivated_hp_changes), \
            True

    return response
def perform_request_record(req, request_type, recid, uid, data):
    """Handle 'major' record related requests like fetching, submitting or
    deleting a record, cancel editing or preparing a record for merging.

    req -- the request object (used to read the user's session settings)
    request_type -- one of 'newRecord', 'getRecord', 'submit', 'revert',
        'cancel', 'deleteRecord', 'deleteRecordCache', 'prepareRecordMerge'
    recid -- id of the record the request operates on
    uid -- id of the user owning the cache file
    data -- request-specific parameters
    Returns a dict with at least 'resultCode'.
    """
    response = {}

    if request_type == 'newRecord':
        # Create a new record.
        new_recid = reserve_record_id()
        new_type = data['newType']
        if new_type == 'empty':
            # Create a new empty record.
            # NOTE(review): the sibling 'template'/'clone' branches create the
            # cache for new_recid; confirm that recid (not new_recid) is
            # intended here.
            create_cache_file(recid, uid)
            response['resultCode'], response['newRecID'] = 6, new_recid

        elif new_type == 'template':
            # Create a new record from XML record template.
            template_filename = data['templateFilename']
            template = get_record_template(template_filename)
            if not template:
                response['resultCode']  = 108
            else:
                record = create_record(template)[0]
                if not record:
                    response['resultCode']  = 109
                else:
                    record_add_field(record, '001',
                                     controlfield_value=str(new_recid))
                    create_cache_file(new_recid, uid, record, True)
                    response['resultCode'], response['newRecID']  = 7, new_recid

        elif new_type == 'clone':
            # Clone an existing record (from the users cache).
            existing_cache = cache_exists(recid, uid)
            if existing_cache:
                try:
                    record = get_cache_file_contents(recid, uid)[2]
                except:
                    # if, for example, the cache format was wrong (outdated)
                    record = get_bibrecord(recid)
            else:
                # Cache missing. Fall back to using original version.
                record = get_bibrecord(recid)
            # Replace the record id control field with the newly reserved id.
            record_delete_field(record, '001')
            record_add_field(record, '001', controlfield_value=str(new_recid))
            create_cache_file(new_recid, uid, record, True)
            response['resultCode'], response['newRecID'] = 8, new_recid
    elif request_type == 'getRecord':
        # Fetch the record. Possible error situations:
        # - Non-existing record
        # - Deleted record
        # - Record locked by other user
        # - Record locked by queue
        # A cache file will be created if it does not exist.
        # If the cache is outdated (i.e., not based on the latest DB revision),
        # cacheOutdated will be set to True in the response.
        record_status = record_exists(recid)
        existing_cache = cache_exists(recid, uid)
        read_only_mode = False
        if data.has_key("inReadOnlyMode"):
            read_only_mode = data['inReadOnlyMode']

        if record_status == 0:
            response['resultCode'] = 102
        elif record_status == -1:
            response['resultCode'] = 103
        elif not read_only_mode and not existing_cache and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif not read_only_mode and existing_cache and \
                cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif not read_only_mode and record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            if data.get('deleteRecordCache'):
                # Client asked for a fresh start: discard the current cache.
                delete_cache_file(recid, uid)
                existing_cache = False
                pending_changes = []
                disabled_hp_changes = {}
            if read_only_mode:
                # Read-only retrieval: no cache is created or touched.
                if data.has_key('recordRevision'):
                    # A specific historic revision was requested.
                    record_revision_ts = data['recordRevision']
                    record_xml = get_marcxml_of_revision(recid, record_revision_ts)
                    record = create_record(record_xml)[0]
                    record_revision = timestamp_to_revision(record_revision_ts)
                    pending_changes = []
                    disabled_hp_changes = {}
                else:
                    # a normal cacheless retrieval of a record
                    record = get_bibrecord(recid)
                    record_revision = get_record_last_modification_date(recid)
                    pending_changes = []
                    disabled_hp_changes = {}
                cache_dirty = False
                mtime = 0
            elif not existing_cache:
                record_revision, record = create_cache_file(recid, uid)
                mtime = get_cache_mtime(recid, uid)
                pending_changes = []
                disabled_hp_changes = {}
                cache_dirty = False
            else:
                try:
                    cache_dirty, record_revision, record, pending_changes, disabled_hp_changes= \
                        get_cache_file_contents(recid, uid)
                    touch_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    if not latest_record_revision(recid, record_revision):
                        response['cacheOutdated'] = True
                except:
                    # Unreadable/wrong-format cache: rebuild it from the DB.
                    record_revision, record = create_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    pending_changes = []
                    disabled_hp_changes = {}
                    cache_dirty = False

            if data['clonedRecord']:
                response['resultCode'] = 9
            else:
                response['resultCode'] = 3

            revision_author = get_record_revision_author(recid, record_revision)
            last_revision_ts = revision_to_timestamp(get_record_last_modification_date(recid))
            revisions_history = get_record_revision_timestamps(recid)

            response['cacheDirty'], response['record'], response['cacheMTime'],\
                response['recordRevision'], response['revisionAuthor'], \
                response['lastRevision'], response['revisionsHistory'], \
                response['inReadOnlyMode'], response['pendingHpChanges'], \
                response['disabledHpChanges'] = cache_dirty, record, mtime, \
                revision_to_timestamp(record_revision), revision_author, \
                last_revision_ts, revisions_history, read_only_mode, pending_changes, \
                disabled_hp_changes
            # Set tag format from user's session settings.
            try:
                tagformat_settings = session_param_get(req, 'bibedit_tagformat')
                tagformat = tagformat_settings[recid]
            except KeyError:
                tagformat = CFG_BIBEDIT_TAG_FORMAT
            response['tagFormat'] = tagformat

    elif request_type == 'submit':
        # Submit the record. Possible error situations:
        # - Missing cache file
        # - Cache file modified in other editor
        # - Record locked by other user
        # - Record locked by queue
        # - Invalid XML characters
        # If the cache is outdated cacheOutdated will be set to True in the
        # response.
        if not cache_exists(recid, uid):
            response['resultCode'] = 106
        elif not get_cache_mtime(recid, uid) == data['cacheMTime']:
            response['resultCode'] = 107
        elif cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            try:
                record_revision, record, pending_changes, disabled_changes = get_cache_file_contents(recid, uid)[1:]
                # Round-trip through XML to validate the record before saving.
                xml_record = print_rec(record)
                record, status_code, list_of_errors = create_record(xml_record)
                if status_code == 0:
                    response['resultCode'], response['errors'] = 110, \
                        list_of_errors
                elif not data['force'] and \
                        not latest_record_revision(recid, record_revision):
                    response['cacheOutdated'] = True
                else:
                    save_xml_record(recid, uid)
                    response['resultCode'] = 4
            except:
                response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['wrong_cache_file_format']
    elif request_type == 'revert':
        # Revert the record to a previous revision identified by revId.
        revId = data['revId']
        job_date = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(revId).groups()
        revision_xml = get_marcxml_of_revision(recid, job_date)
        save_xml_record(recid, uid, revision_xml)
        if (cache_exists(recid, uid)):
            delete_cache_file(recid, uid)
        response['resultCode'] = 4

    elif request_type == 'cancel':
        # Cancel editing by deleting the cache file. Possible error situations:
        # - Cache file modified in other editor
        if cache_exists(recid, uid):
            if get_cache_mtime(recid, uid) == data['cacheMTime']:
                delete_cache_file(recid, uid)
                response['resultCode'] = 5
            else:
                response['resultCode'] = 107
        else:
            response['resultCode'] = 5

    elif request_type == 'deleteRecord':
        # Submit the record. Possible error situations:
        # - Record locked by other user
        # - Record locked by queue
        # As the user is requesting deletion we proceed even if the cache file
        # is missing and we don't check if the cache is outdated or has
        # been modified in another editor.
        existing_cache = cache_exists(recid, uid)
        pending_changes = []
        if existing_cache and cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            if not existing_cache:
                record_revision, record, pending_changes, desactivated_hp_changes = create_cache_file(recid, uid)
            else:
                try:
                    record_revision, record, pending_changes, desactivated_hp_changes = get_cache_file_contents(
                        recid, uid)[1:]
                except:
                    record_revision, record, pending_changes, desactivated_hp_changes = create_cache_file(recid, uid)
            # Mark the record as deleted by adding a 980__c DELETED field.
            record_add_field(record, '980', ' ', ' ', '', [('c', 'DELETED')])
            update_cache_file_contents(recid, uid, record_revision, record, pending_changes, desactivated_hp_changes)
            save_xml_record(recid, uid)
            delete_related_holdingpen_changes(recid) # we don't need any changes related to a deleted record
            response['resultCode'] = 10

    elif request_type == 'deleteRecordCache':
        # Delete the cache file. Ignore the request if the cache has been
        # modified in another editor.
        if cache_exists(recid, uid) and get_cache_mtime(recid, uid) == \
                data['cacheMTime']:
            delete_cache_file(recid, uid)
        response['resultCode'] = 11

    elif request_type == 'prepareRecordMerge':
        # We want to merge the cache with the current DB version of the record,
        # so prepare an XML file from the file cache, to be used by BibMerge.
        # Possible error situations:
        # - Missing cache file
        # - Record locked by other user
        # - Record locked by queue
        # We don't check if cache is outdated (a likely scenario for this
        # request) or if it has been modified in another editor.
        if not cache_exists(recid, uid):
            response['resultCode'] = 106
        elif cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            save_xml_record(recid, uid, to_upload=False, to_merge=True)
            response['resultCode'] = 12

    return response
def perform_request_update_record(requestType, uid, data):
    """Handle record update requests for actions on a field level.

    Handle merging, adding, replacing, deleting or diffing of whole fields
    between the master record (``data['recID1']``, loaded from the user's
    cache) and a slave record (``data['recID2']``, loaded according to
    ``data['record2Mode']``).

    :param requestType: one of 'getFieldGroup', 'getFieldGroupDiff',
        'mergeFieldGroup', 'mergeNCFieldGroup', 'replaceField', 'addField',
        'deleteField', 'mergeField'; any other value yields resultCode 1
    :param uid: identifier of the logged-in user (owner of the cache file)
    :param data: request parameters (record ids, field tag, field codes,
        slave record mode)
    :return: dict with 'resultCode' (0 on success, non-zero on error),
        'resultText' and, for rendering requests, 'resultHtml'
    """
    result = {"resultCode": 0, "resultText": ""}
    recid1 = data["recID1"]
    recid2 = data["recID2"]
    # Cache tuple layout: (dirty, revision, record, pending_changes,
    # disabled_hp_changes, ...).
    record_content = get_cache_file_contents(recid1, uid)
    rec_revision = record_content[1]
    record1 = record_content[2]
    pending_changes = record_content[3]
    disabled_hp_changes = record_content[4]
    # We will not be able to Undo/Redo correctly after any modifications
    # from the level of bibmerge are performed, so both lists are cleared
    # when the cache is saved below.
    undo_list = []
    redo_list = []

    mode = data["record2Mode"]
    record2 = _get_record_slave(recid2, result, mode, uid)
    if result["resultCode"] != 0:
        # Slave record not accessible - propagate the error information.
        return result

    if requestType == "getFieldGroup":
        result["resultHtml"] = bibmerge_templates.BM_html_field_group(
            record1, record2, data["fieldTag"])
        result["resultText"] = "Field group retrieved"
        return result
    if requestType == "getFieldGroupDiff":
        result["resultHtml"] = bibmerge_templates.BM_html_field_group(
            record1, record2, data["fieldTag"], True)
        result["resultText"] = "Fields compared"
        return result

    if requestType in ("mergeFieldGroup", "mergeNCFieldGroup"):
        fnum, ind1, ind2 = _fieldtagNum_and_indicators(data["fieldTag"])
        # The 'NC' (non-conflicting) variant merges with the conflict
        # checking flag disabled.
        merge_field_group(record1, record2, fnum, ind1, ind2,
                          requestType == "mergeFieldGroup")
        resultText = "Field group merged"

    elif requestType in ("replaceField", "addField"):
        fnum = _fieldtagNum_and_indicators(data["fieldTag"])[0]
        findex1 = _field_info(data["fieldCode1"])[1]
        findex2 = _field_info(data["fieldCode2"])[1]
        if findex2 is None:
            # Nothing selected on the slave side to copy from.
            result["resultCode"], result["resultText"] = \
                1, "No value in the selected field"
            return result
        if requestType == "replaceField":
            replace_field(record1, record2, fnum, findex1, findex2)
            resultText = "Field replaced"
        else:  # requestType == 'addField'
            add_field(record1, record2, fnum, findex1, findex2)
            resultText = "Field added"

    elif requestType == "deleteField":
        fnum = _fieldtagNum_and_indicators(data["fieldTag"])[0]
        findex1 = _field_info(data["fieldCode1"])[1]
        if findex1 is None:
            # Nothing selected on the master side to delete.
            result["resultCode"], result["resultText"] = \
                1, "No value in the selected field"
            return result
        delete_field(record1, fnum, findex1)
        resultText = "Field deleted"

    elif requestType == "mergeField":
        fnum = _fieldtagNum_and_indicators(data["fieldTag"])[0]
        findex1 = _field_info(data["fieldCode1"])[1]
        findex2 = _field_info(data["fieldCode2"])[1]
        if findex2 is None:
            result["resultCode"], result["resultText"] = \
                1, "No value in the selected field"
            return result
        merge_field(record1, record2, fnum, findex1, findex2)
        resultText = "Field merged"

    else:
        result["resultCode"], result["resultText"] = 1, "Wrong request type"
        return result

    # Re-render the (possibly modified) field group and persist the change,
    # wiping the undo/redo history.
    result["resultHtml"] = bibmerge_templates.BM_html_field_group(
        record1, record2, data["fieldTag"])
    result["resultText"] = resultText
    update_cache_file_contents(recid1, uid, rec_revision, record1,
                               pending_changes, disabled_hp_changes,
                               undo_list, redo_list)
    return result
def perform_request_update_record(request_type, recid, uid, cacheMTime, data, \
                                  hpChanges, undoRedoOp, isBulk=False):
    """Handle record update requests like adding, modifying, moving or deleting
    of fields or subfields. Possible common error situations:
    - Missing cache file
    - Cache file modified in other editor
    Explanation of some parameters:
       undoRedoOp - Indicates in "undo"/"redo"/undo_descriptor operation is
                    performed by a current request.
       hpChanges - dict of Holding Pen change operations applied regardless
                   of the request type; recognised keys: "toDisable",
                   "toEnable", "toOverride", "changesetsToDeactivate",
                   "changesetsToActivate".
       cacheMTime - the cache mtime the client last saw; a mismatch (unless
                    isBulk) means another editor modified the cache.
       isBulk - when True, the mtime check is skipped because bulk requests
                deliberately apply several changes one after another.
    Returns a dict with at least 'resultCode'; on success also 'cacheMTime'
    and 'cacheDirty'.
    """

    response = {}

    if not cache_exists(recid, uid):
        response['resultCode'] = 106
    elif not get_cache_mtime(recid, uid) == cacheMTime and isBulk == False:
        # In case of a bulk request, the changes are deliberately performed
        # immediately one after another
        response['resultCode'] = 107
    else:
        # Cache tuple layout (after dropping element 0, the dirty flag):
        # (revision, record, pending_changes, deactivated_hp_changes,
        #  undo_list, redo_list).
        try:
            record_revision, record, pending_changes, deactivated_hp_changes, \
                undo_list, redo_list = get_cache_file_contents(recid, uid)[1:]
        except:
            # Cache exists but does not unpack into 6 elements - treat it
            # as an outdated/corrupt cache file format.
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'wrong_cache_file_format']
            return response

        # process all the Holding Pen changes operations ... regardles the
        # request type
        if hpChanges.has_key("toDisable"):
            # Mark individual pending changes as already applied.
            for changeId in hpChanges["toDisable"]:
                pending_changes[changeId]["applied_change"] = True

        if hpChanges.has_key("toEnable"):
            # Re-enable previously disabled pending changes.
            for changeId in hpChanges["toEnable"]:
                pending_changes[changeId]["applied_change"] = False

        if hpChanges.has_key("toOverride"):
            # Replace the whole pending-changes list wholesale.
            pending_changes = hpChanges["toOverride"]

        if hpChanges.has_key("changesetsToDeactivate"):
            for changesetId in hpChanges["changesetsToDeactivate"]:
                deactivated_hp_changes[changesetId] = True

        if hpChanges.has_key("changesetsToActivate"):
            for changesetId in hpChanges["changesetsToActivate"]:
                deactivated_hp_changes[changesetId] = False

        # processing the undo/redo entries:
        # undo moves the newest undo descriptor to the front of the redo
        # list; redo moves the oldest redo descriptor back onto undo.
        if undoRedoOp == "undo":
            try:
                redo_list = [undo_list[-1]] + redo_list
                undo_list = undo_list[:-1]
            except:
                # NOTE(review): this bare except masks the original
                # IndexError traceback; the lists' contents are included in
                # the re-raised message instead.
                raise Exception("An exception occured when undoing previous" + \
                                " operation. Undo list: " + str(undo_list) + \
                                " Redo list " + str(redo_list))
        elif undoRedoOp == "redo":
            try:
                undo_list = undo_list + [redo_list[0]]
                redo_list = redo_list[1:]
            except:
                raise Exception("An exception occured when redoing previous" + \
                                " operation. Undo list: " + str(undo_list) + \
                                " Redo list " + str(redo_list))
        else:
            # This is a genuine operation - we have to add a new descriptor
            # to the undo list and cancel the redo unless the operation is
            # a bulk operation
            if undoRedoOp != None:
                undo_list = undo_list + [undoRedoOp]
                redo_list = []
            else:
                # Only bulk sub-requests are allowed to omit the descriptor.
                assert isBulk == True

        field_position_local = data.get('fieldPosition')
        if field_position_local is not None:
            field_position_local = int(field_position_local)
        if request_type == 'otherUpdateRequest':
            # An empty request. Might be useful if we want to perform
            # operations that require only the actions performed globally,
            # like modifying the holdingPen changes list
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'editor_modifications_changed']
        elif request_type == 'deactivateHoldingPenChangeset':
            # the changeset has been marked as processed ( user applied it in
            # the editor). Marking as used in the cache file.
            # CAUTION: This function has been implemented here because logically
            #          it fits with the modifications made to the cache file.
            #          No changes are made to the Holding Pen physically. The
            #          changesets are related to the cache because we want to
            #          cancel the removal every time the cache disappears for
            #          any reason
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'disabled_hp_changeset']
        elif request_type == 'addField':
            if data['controlfield']:
                record_add_field(record, data['tag'],
                                 controlfield_value=data['value'])
                response['resultCode'] = 20
            else:
                record_add_field(record, data['tag'], data['ind1'],
                                 data['ind2'], subfields=data['subfields'],
                                 field_position_local=field_position_local)
                response['resultCode'] = 21

        elif request_type == 'addSubfields':
            subfields = data['subfields']
            for subfield in subfields:
                # subfield is a (code, value) pair.
                record_add_subfield_into(record, data['tag'], subfield[0],
                    subfield[1], subfield_position=None,
                    field_position_local=field_position_local)
            if len(subfields) == 1:
                response['resultCode'] = 22
            else:
                response['resultCode'] = 23
        elif request_type == 'addFieldsSubfieldsOnPositions':
            #1) Sorting the fields by their identifiers
            # fieldsToAdd maps tag -> position -> (subfields, ind1, ind2,
            # controlfield_value); positions must be applied in order so the
            # local indices remain valid.
            fieldsToAdd = data['fieldsToAdd']
            subfieldsToAdd = data['subfieldsToAdd']
            for tag in fieldsToAdd.keys():
                positions = fieldsToAdd[tag].keys()
                positions.sort()
                for position in positions:
                    # now adding fields at a position

                    isControlfield = (len(fieldsToAdd[tag][position][0]) == 0)
                    # if there are n subfields, this is a control field
                    if isControlfield:
                        controlfieldValue = fieldsToAdd[tag][position][3]
                        record_add_field(record, tag, field_position_local = \
                                             int(position), \
                                             controlfield_value = \
                                                 controlfieldValue)
                    else:
                        subfields = fieldsToAdd[tag][position][0]
                        ind1 = fieldsToAdd[tag][position][1]
                        ind2 = fieldsToAdd[tag][position][2]
                        record_add_field(record, tag, ind1, ind2, subfields = \
                                             subfields, field_position_local = \
                                                int(position))
            # now adding the subfields
            for tag in subfieldsToAdd.keys():
                for fieldPosition in subfieldsToAdd[tag].keys(): #now the fields
                                                          #order not important !
                    subfieldsPositions = subfieldsToAdd[tag][fieldPosition]. \
                                           keys()
                    subfieldsPositions.sort()
                    for subfieldPosition in subfieldsPositions:
                        subfield = subfieldsToAdd[tag][fieldPosition]\
                            [subfieldPosition]
                        record_add_subfield_into(record, tag, subfield[0], \
                                                 subfield[1], \
                                                 subfield_position = \
                                                     int(subfieldPosition), \
                                                 field_position_local = \
                                                     int(fieldPosition))

            response['resultCode'] = \
                CFG_BIBEDIT_AJAX_RESULT_CODES_REV['added_positioned_subfields']

        elif request_type == 'modifyField': # changing the field structure
            # first remove subfields and then add new... change the indices
            subfields = data['subFields'] # parse the JSON representation of
                                          # the subfields here

            new_field = create_field(subfields, data['ind1'], data['ind2'])
            record_replace_field(record, data['tag'], new_field, \
                field_position_local = data['fieldPosition'])
            response['resultCode'] = 26

        elif request_type == 'modifyContent':
            # A subfield index selects subfield modification; otherwise the
            # request targets a controlfield's value.
            if data['subfieldIndex'] != None:
                record_modify_subfield(record, data['tag'],
                    data['subfieldCode'], data['value'],
                    int(data['subfieldIndex']),
                    field_position_local=field_position_local)
            else:
                record_modify_controlfield(record, data['tag'], data["value"],
                  field_position_local=field_position_local)
            response['resultCode'] = 24

        elif request_type == 'moveSubfield':
            record_move_subfield(record, data['tag'],
                int(data['subfieldIndex']), int(data['newSubfieldIndex']),
                field_position_local=field_position_local)
            response['resultCode'] = 25

        elif request_type == 'moveField':
            if data['direction'] == 'up':
                final_position_local = field_position_local-1
            else: # direction is 'down'
                final_position_local = field_position_local+1
            record_move_fields(record, data['tag'], [field_position_local],
                final_position_local)
            response['resultCode'] = 32

        elif request_type == 'deleteFields':
            # toDelete maps tag -> field_position -> [subfield positions];
            # an empty subfield list means "delete the whole field".
            to_delete = data['toDelete']
            deleted_fields = 0
            deleted_subfields = 0
            for tag in to_delete:
                #Sorting the fields in a decreasing order by the local position!
                # (deleting from the end keeps the remaining indices valid)
                fieldsOrder = to_delete[tag].keys()
                fieldsOrder.sort(lambda a, b: int(b) - int(a))
                for field_position_local in fieldsOrder:
                    if not to_delete[tag][field_position_local]:
                        # No subfields specified - delete entire field.
                        record_delete_field(record, tag,
                            field_position_local=int(field_position_local))
                        deleted_fields += 1
                    else:
                        for subfield_position in \
                                to_delete[tag][field_position_local][::-1]:
                            # Delete subfields in reverse order (to keep the
                            # indexing correct).
                            record_delete_subfield_from(record, tag,
                                int(subfield_position),
                                field_position_local=int(field_position_local))
                            deleted_subfields += 1
            # Result code encodes what was deleted (single/multiple
            # fields/subfields or a mixture).
            if deleted_fields == 1 and deleted_subfields == 0:
                response['resultCode'] = 26
            elif deleted_fields and deleted_subfields == 0:
                response['resultCode'] = 27
            elif deleted_subfields == 1 and deleted_fields == 0:
                response['resultCode'] = 28
            elif deleted_subfields and deleted_fields == 0:
                response['resultCode'] = 29
            else:
                response['resultCode'] = 30
        # Persist the modified record and bookkeeping lists; the new mtime is
        # returned so the client can keep its cacheMTime in sync.
        response['cacheMTime'], response['cacheDirty'] = \
            update_cache_file_contents(recid, uid, record_revision, record, \
                                       pending_changes, \
                                       deactivated_hp_changes, \
                                       undo_list, redo_list), \
            True

    return response
# Exemple #12
# 0
def perform_request_record(req, request_type, recid, uid, data, ln=CFG_SITE_LANG):
    """Handle 'major' record related requests like fetching, submitting or
    deleting a record, cancel editing or preparing a record for merging.

    Supported request types: "newRecord", "getRecord", "submit", "revert",
    "cancel", "deleteRecord", "deleteRecordCache", "prepareRecordMerge".
    Returns a response dict whose 'resultCode' encodes success or the
    specific error situation; extra keys depend on the request type.
    """
    response = {}

    if request_type == "newRecord":
        # Create a new record.
        new_recid = reserve_record_id()
        new_type = data["newType"]
        if new_type == "empty":
            # Create a new empty record.
            # NOTE(review): the cache is created for `recid` while the
            # response advertises `new_recid`; the "template" and "clone"
            # branches below create the cache for new_recid -- confirm this
            # asymmetry is intended.
            create_cache_file(recid, uid)
            response["resultCode"], response["newRecID"] = 6, new_recid

        elif new_type == "template":
            # Create a new record from XML record template.
            template_filename = data["templateFilename"]
            template = get_record_template(template_filename)
            if not template:
                response["resultCode"] = 108
            else:
                record = create_record(template)[0]
                if not record:
                    response["resultCode"] = 109
                else:
                    # Stamp the reserved id into the 001 controlfield.
                    record_add_field(record, "001", controlfield_value=str(new_recid))
                    create_cache_file(new_recid, uid, record, True)
                    response["resultCode"], response["newRecID"] = 7, new_recid

        elif new_type == "clone":
            # Clone an existing record (from the users cache).
            existing_cache = cache_exists(recid, uid)
            if existing_cache:
                try:
                    record = get_cache_file_contents(recid, uid)[2]
                except:
                    # if, for example, the cache format was wrong (outdated)
                    record = get_bibrecord(recid)
            else:
                # Cache missing. Fall back to using original version.
                record = get_bibrecord(recid)
            # Replace the source record's id with the newly reserved one.
            record_delete_field(record, "001")
            record_add_field(record, "001", controlfield_value=str(new_recid))
            create_cache_file(new_recid, uid, record, True)
            response["resultCode"], response["newRecID"] = 8, new_recid
    elif request_type == "getRecord":
        # Fetch the record. Possible error situations:
        # - Non-existing record
        # - Deleted record
        # - Record locked by other user
        # - Record locked by queue
        # A cache file will be created if it does not exist.
        # If the cache is outdated (i.e., not based on the latest DB revision),
        # cacheOutdated will be set to True in the response.
        record_status = record_exists(recid)
        existing_cache = cache_exists(recid, uid)
        read_only_mode = False

        if data.has_key("inReadOnlyMode"):
            read_only_mode = data["inReadOnlyMode"]

        if record_status == 0:
            # Record does not exist.
            response["resultCode"] = 102
        elif record_status == -1:
            # Record has been deleted.
            response["resultCode"] = 103
        elif not read_only_mode and not existing_cache and record_locked_by_other_user(recid, uid):
            response["resultCode"] = 104
        elif (
            not read_only_mode
            and existing_cache
            and cache_expired(recid, uid)
            and record_locked_by_other_user(recid, uid)
        ):
            # Our own cache has expired and someone else holds the lock now.
            response["resultCode"] = 104
        elif not read_only_mode and record_locked_by_queue(recid):
            response["resultCode"] = 105
        else:
            if data.get("deleteRecordCache"):
                # Client explicitly asked for a fresh start.
                delete_cache_file(recid, uid)
                existing_cache = False
                pending_changes = []
                disabled_hp_changes = {}
            if read_only_mode:
                # Cacheless retrieval: either a specific historical revision
                # or the current DB version.
                if data.has_key("recordRevision"):
                    record_revision_ts = data["recordRevision"]
                    record_xml = get_marcxml_of_revision(recid, record_revision_ts)
                    record = create_record(record_xml)[0]
                    record_revision = timestamp_to_revision(record_revision_ts)
                    pending_changes = []
                    disabled_hp_changes = {}
                else:
                    # a normal cacheless retrieval of a record
                    record = get_bibrecord(recid)
                    record_revision = get_record_last_modification_date(recid)
                    pending_changes = []
                    disabled_hp_changes = {}
                cache_dirty = False
                mtime = 0
                undo_list = []
                redo_list = []
            elif not existing_cache:
                record_revision, record = create_cache_file(recid, uid)
                mtime = get_cache_mtime(recid, uid)
                pending_changes = []
                disabled_hp_changes = {}
                undo_list = []
                redo_list = []
                cache_dirty = False
            else:
                # TODO: This try except should be replaced with something nicer,
                #      like an argument indicating if a new cache file is to
                #      be created
                try:
                    cache_dirty, record_revision, record, pending_changes, disabled_hp_changes, undo_list, redo_list = get_cache_file_contents(
                        recid, uid
                    )
                    touch_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    if not latest_record_revision(recid, record_revision) and get_record_revisions(recid) != ():
                        # This sould prevent from using old cache in case of
                        # viewing old version. If there are no revisions,
                        # it means we should skip this step because this
                        # is a new record
                        response["cacheOutdated"] = True

                except:
                    # Unreadable/outdated cache format: recreate from scratch.
                    record_revision, record = create_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    pending_changes = []
                    disabled_hp_changes = {}
                    cache_dirty = False
                    undo_list = []
                    redo_list = []
            if data["clonedRecord"]:
                response["resultCode"] = 9
            else:
                response["resultCode"] = 3
            revision_author = get_record_revision_author(recid, record_revision)
            last_revision_ts = revision_to_timestamp(get_record_last_modification_date(recid))
            revisions_history = get_record_revision_timestamps(recid)
            number_of_physical_copies = get_number_copies(recid)
            bibcirc_details_URL = create_item_details_url(recid, ln)
            can_have_copies = can_record_have_physical_copies(recid)

            # Bulk-populate the response with everything the editor UI needs.
            response["cacheDirty"], response["record"], response["cacheMTime"], response["recordRevision"], response[
                "revisionAuthor"
            ], response["lastRevision"], response["revisionsHistory"], response["inReadOnlyMode"], response[
                "pendingHpChanges"
            ], response[
                "disabledHpChanges"
            ], response[
                "undoList"
            ], response[
                "redoList"
            ] = (
                cache_dirty,
                record,
                mtime,
                revision_to_timestamp(record_revision),
                revision_author,
                last_revision_ts,
                revisions_history,
                read_only_mode,
                pending_changes,
                disabled_hp_changes,
                undo_list,
                redo_list,
            )
            response["numberOfCopies"] = number_of_physical_copies
            response["bibCirculationUrl"] = bibcirc_details_URL
            response["canRecordHavePhysicalCopies"] = can_have_copies
            # Set tag format from user's session settings.
            try:
                tagformat_settings = session_param_get(req, "bibedit_tagformat")
                tagformat = tagformat_settings[recid]
            except KeyError:
                tagformat = CFG_BIBEDIT_TAG_FORMAT
            response["tagFormat"] = tagformat

    elif request_type == "submit":
        # Submit the record. Possible error situations:
        # - Missing cache file
        # - Cache file modified in other editor
        # - Record locked by other user
        # - Record locked by queue
        # - Invalid XML characters
        # If the cache is outdated cacheOutdated will be set to True in the
        # response.
        if not cache_exists(recid, uid):
            response["resultCode"] = 106
        elif not get_cache_mtime(recid, uid) == data["cacheMTime"]:
            response["resultCode"] = 107
        elif cache_expired(recid, uid) and record_locked_by_other_user(recid, uid):
            response["resultCode"] = 104
        elif record_locked_by_queue(recid):
            response["resultCode"] = 105
        else:
            try:
                tmp_result = get_cache_file_contents(recid, uid)
                record_revision = tmp_result[1]
                record = tmp_result[2]
                pending_changes = tmp_result[3]
                #                disabled_changes = tmp_result[4]

                # Round-trip the record through MARCXML to validate it
                # before uploading.
                xml_record = print_rec(record)
                record, status_code, list_of_errors = create_record(xml_record)
                if status_code == 0:
                    response["resultCode"], response["errors"] = 110, list_of_errors
                elif not data["force"] and not latest_record_revision(recid, record_revision):
                    # DB moved on since this cache was taken; let the user
                    # decide (merge or force).
                    response["cacheOutdated"] = True
                else:
                    save_xml_record(recid, uid)
                    response["resultCode"] = 4
            except:
                response["resultCode"] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV["error_wrong_cache_file_format"]
    elif request_type == "revert":
        # Restore a historical revision identified by its timestamp-encoded
        # revision id and discard the current cache.
        revId = data["revId"]
        job_date = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(revId).groups()
        revision_xml = get_marcxml_of_revision(recid, job_date)
        save_xml_record(recid, uid, revision_xml)
        if cache_exists(recid, uid):
            delete_cache_file(recid, uid)
        response["resultCode"] = 4

    elif request_type == "cancel":
        # Cancel editing by deleting the cache file. Possible error situations:
        # - Cache file modified in other editor
        if cache_exists(recid, uid):
            if get_cache_mtime(recid, uid) == data["cacheMTime"]:
                delete_cache_file(recid, uid)
                response["resultCode"] = 5
            else:
                response["resultCode"] = 107
        else:
            response["resultCode"] = 5

    elif request_type == "deleteRecord":
        # Submit the record. Possible error situations:
        # - Record locked by other user
        # - Record locked by queue
        # As the user is requesting deletion we proceed even if the cache file
        # is missing and we don't check if the cache is outdated or has
        # been modified in another editor.
        existing_cache = cache_exists(recid, uid)
        pending_changes = []

        if has_copies(recid):
            response["resultCode"] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV["error_physical_copies_exist"]
        elif existing_cache and cache_expired(recid, uid) and record_locked_by_other_user(recid, uid):
            response["resultCode"] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV["error_rec_locked_by_user"]
        elif record_locked_by_queue(recid):
            response["resultCode"] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV["error_rec_locked_by_queue"]
        else:
            # NOTE(review): the three unpackings below expect different
            # arities from create_cache_file / get_cache_file_contents
            # (6, 6 and 4 values respectively) -- verify against their
            # actual return signatures.
            if not existing_cache:
                record_revision, record, pending_changes, deactivated_hp_changes, undo_list, redo_list = create_cache_file(
                    recid, uid
                )
            else:
                try:
                    record_revision, record, pending_changes, deactivated_hp_changes, undo_list, redo_list = get_cache_file_contents(
                        recid, uid
                    )[
                        1:
                    ]
                except:
                    record_revision, record, pending_changes, deactivated_hp_changes = create_cache_file(recid, uid)
            # Deletion is recorded by tagging the record 980__c:DELETED.
            record_add_field(record, "980", " ", " ", "", [("c", "DELETED")])
            undo_list = []
            redo_list = []
            update_cache_file_contents(
                recid, uid, record_revision, record, pending_changes, deactivated_hp_changes, undo_list, redo_list
            )
            save_xml_record(recid, uid)
            delete_related_holdingpen_changes(recid)  # we don't need any changes
            # related to a deleted record
            response["resultCode"] = 10

    elif request_type == "deleteRecordCache":
        # Delete the cache file. Ignore the request if the cache has been
        # modified in another editor.
        if cache_exists(recid, uid) and get_cache_mtime(recid, uid) == data["cacheMTime"]:
            delete_cache_file(recid, uid)
        response["resultCode"] = 11

    elif request_type == "prepareRecordMerge":
        # We want to merge the cache with the current DB version of the record,
        # so prepare an XML file from the file cache, to be used by BibMerge.
        # Possible error situations:
        # - Missing cache file
        # - Record locked by other user
        # - Record locked by queue
        # We don't check if cache is outdated (a likely scenario for this
        # request) or if it has been modified in another editor.
        if not cache_exists(recid, uid):
            response["resultCode"] = 106
        elif cache_expired(recid, uid) and record_locked_by_other_user(recid, uid):
            response["resultCode"] = 104
        elif record_locked_by_queue(recid):
            response["resultCode"] = 105
        else:
            save_xml_record(recid, uid, to_upload=False, to_merge=True)
            response["resultCode"] = 12

    return response
# Exemple #13
# 0
def perform_request_autocomplete(request_type, recid, uid, data):
    """
    Perform an AJAX request associated with the retrieval of autocomplete
    data.

    Arguments:
        request_type: Type of the currently served request; one of
                      'autokeyword', 'autosuggest' or 'autocomplete'
        recid: the identifier of the record
        uid: The identifier of the user being currently logged in
        data: The request data containing possibly important additional
              arguments

    Returns:
        A response dictionary carrying the retrieved items under a key
        named after the request type, plus a 'resultCode'.
    """
    response = {}
    # get the values based on which one needs to search
    searchby = data["value"]
    # Build the full MARC tag only if every component was supplied in
    # the request data.
    fulltag = ""
    if ("maintag" in data and "subtag1" in data and
            "subtag2" in data and "subfieldcode" in data):
        maintag = data["maintag"]
        subtag1 = data["subtag1"]
        subtag2 = data["subtag2"]
        # Empty/blank indicators are encoded as '_' in the full tag.
        u_subtag1 = subtag1
        u_subtag2 = subtag2
        if (not subtag1) or (subtag1 == " "):
            u_subtag1 = "_"
        if (not subtag2) or (subtag2 == " "):
            u_subtag2 = "_"
        subfieldcode = data["subfieldcode"]
        fulltag = maintag + u_subtag1 + u_subtag2 + subfieldcode
    if request_type == "autokeyword":
        # call the keyword-from-ontology function
        if fulltag and searchby:
            items = get_kbt_items_for_bibedit(CFG_BIBEDIT_KEYWORD_TAXONOMY,
                                              CFG_BIBEDIT_KEYWORD_RDFLABEL,
                                              searchby)
            response["autokeyword"] = items
    if request_type == "autosuggest":
        # call knowledge base function and keep only the suggestions
        # that actually start with the searched prefix
        if fulltag and searchby and len(searchby) > 3:
            suggest_values = get_kbd_values_for_bibedit(fulltag, "", searchby)
            response["autosuggest"] = [sugg for sugg in suggest_values
                                       if sugg.startswith(searchby)]
    if request_type == "autocomplete":
        # call the values function with the correct kb_name
        if fulltag in CFG_BIBEDIT_AUTOCOMPLETE_TAGS_KBS:
            kbname = CFG_BIBEDIT_AUTOCOMPLETE_TAGS_KBS[fulltag]
            # The search value may contain several semicolon-separated
            # items; look each one up individually.
            # NOTE: the original used `searchby.rfind(";")` as a boolean,
            # which is -1 (truthy) when no semicolon is present and 0
            # (falsy) when the string *starts* with one -- use `in`.
            items = []
            if searchby:
                if ";" in searchby:
                    items = searchby.split(";")
                else:
                    items = [searchby.strip()]
            vals = []
            for item in items:
                item = item.strip()
                kbrvals = get_kbr_values(kbname, item, "", "e")  # we want an exact match
                if kbrvals and kbrvals[0]:  # add the found val into vals
                    vals.append(kbrvals[0])
            # check that the values are not already contained in other
            # instances of this field
            record = get_cache_file_contents(recid, uid)[2]
            xml_rec = print_rec(record)
            record, status_code, dummy_errors = create_record(xml_rec)
            existing_values = []
            if status_code != 0:
                existing_values = record_get_field_values(record, maintag,
                                                          subtag1, subtag2,
                                                          subfieldcode)
            # Keep only the values not already present in the record.
            # (The original removed elements from the list *while*
            # iterating over it, which skips items.)
            new_vals = [val for val in vals if val not in existing_values]
            response["autocomplete"] = new_vals
    response["resultCode"] = \
        CFG_BIBEDIT_AJAX_RESULT_CODES_REV["autosuggestion_scanned"]
    return response