Example #1
def _get_record(recid, uid, result, fresh_record=False):
    """Retrieve record structure.
    """
    record = None
    mtime = None
    cache_dirty = None
    record_status = record_exists(recid)
    existing_cache = cache_exists(recid, uid)
    if record_status == 0:
        result['resultCode'], result['resultText'] = 1, 'Non-existent record: %s' % recid
    elif record_status == -1:
        result['resultCode'], result['resultText'] = 1, 'Deleted record: %s' % recid
    elif not existing_cache and record_locked_by_other_user(recid, uid):
        result['resultCode'], result['resultText'] = 1, 'Record %s locked by user' % recid
    elif existing_cache and cache_expired(recid, uid) and \
            record_locked_by_other_user(recid, uid):
        result['resultCode'], result['resultText'] = 1, 'Record %s locked by user' % recid
    elif record_locked_by_queue(recid):
        result['resultCode'], result['resultText'] = 1, 'Record %s locked by queue' % recid
    else:
        if fresh_record:
            delete_cache(recid, uid)
            existing_cache = False
        if not existing_cache:
            record_revision, record = create_cache(recid, uid)
            mtime = get_cache_mtime(recid, uid)
            cache_dirty = False
        else:
            tmpRes = get_cache_contents(recid, uid)
            cache_dirty, record_revision, record = tmpRes[0], tmpRes[1], tmpRes[2]
            touch_cache(recid, uid)
            mtime = get_cache_mtime(recid, uid)
            if not latest_record_revision(recid, record_revision):
                result['cacheOutdated'] = True
        result['resultCode'], result['resultText'], result['cacheDirty'], \
            result['cacheMTime'] = 0, 'Record OK', cache_dirty, mtime
    record_order_subfields(record)
    return record
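A minimal caller sketch for _get_record (hypothetical; it assumes recid and uid come from the editor session and that the cache helpers above are importable). The function fills the mutable result dict in place and returns the record structure, or None on error.

# Hypothetical caller of _get_record(); recid and uid are assumed to come
# from the editor session.
result = {}
record = _get_record(recid, uid, result)
if result['resultCode'] == 0:
    # success: result also carries 'cacheDirty' and 'cacheMTime'
    cache_mtime = result['cacheMTime']
else:
    # 1 means a missing, deleted or locked record; 'resultText' says which
    error_message = result['resultText']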
def _get_record(recid, uid, result, fresh_record=False):
    """Retrieve record structure.
    """
    record = None
    mtime = None
    cache_dirty = None
    record_status = record_exists(recid)
    existing_cache = cache_exists(recid, uid)
    if record_status == 0:
        result["resultCode"], result["resultText"] = 1, "Non-existent record: %s" % recid
    elif record_status == -1:
        result["resultCode"], result["resultText"] = 1, "Deleted record: %s" % recid
    elif not existing_cache and record_locked_by_other_user(recid, uid):
        result["resultCode"], result["resultText"] = 1, "Record %s locked by user" % recid
    elif existing_cache and cache_expired(recid, uid) and record_locked_by_other_user(recid, uid):
        result["resultCode"], result["resultText"] = 1, "Record %s locked by user" % recid
    elif record_locked_by_queue(recid):
        result["resultCode"], result["resultText"] = 1, "Record %s locked by queue" % recid
    else:
        if fresh_record:
            delete_cache_file(recid, uid)
            existing_cache = False
        if not existing_cache:
            record_revision, record = create_cache_file(recid, uid)
            mtime = get_cache_mtime(recid, uid)
            cache_dirty = False
        else:
            tmpRes = get_cache_file_contents(recid, uid)
            cache_dirty, record_revision, record = tmpRes[0], tmpRes[1], tmpRes[2]
            touch_cache_file(recid, uid)
            mtime = get_cache_mtime(recid, uid)
            if not latest_record_revision(recid, record_revision):
                result["cacheOutdated"] = True
        result["resultCode"], result["resultText"], result["cacheDirty"], result["cacheMTime"] = (
            0,
            "Record OK",
            cache_dirty,
            mtime,
        )
    return record
Example #3
def _get_record(recid, uid, result, fresh_record=False):
    """Retrieve record structure.
    """
    record = None
    mtime = None
    cache_dirty = None
    record_status = record_exists(recid)
    existing_cache = cache_exists(recid, uid)
    if record_status == 0:
        result['resultCode'], result['resultText'] = 1, 'Non-existent record: %s' % recid
    elif record_status == -1:
        result['resultCode'], result['resultText'] = 1, 'Deleted record: %s' % recid
    elif not existing_cache and record_locked_by_other_user(recid, uid):
        result['resultCode'], result['resultText'] = 1, 'Record %s locked by user' % recid
    elif existing_cache and cache_expired(recid, uid) and \
        record_locked_by_other_user(recid, uid):
        result['resultCode'], result['resultText'] = 1, 'Record %s locked by user' % recid
    elif record_locked_by_queue(recid):
        result['resultCode'], result['resultText'] = 1, 'Record %s locked by queue' % recid
    else:
        if fresh_record:
            delete_cache(recid, uid)
            existing_cache = False
        if not existing_cache:
            record_revision, record = create_cache(recid, uid)
            mtime = get_cache_mtime(recid, uid)
            cache_dirty = False
        else:
            tmpRes = get_cache_contents(recid, uid)
            cache_dirty, record_revision, record = tmpRes[0], tmpRes[1], tmpRes[2]
            touch_cache(recid, uid)
            mtime = get_cache_mtime(recid, uid)
            if not latest_record_revision(recid, record_revision):
                result['cacheOutdated'] = True
        result['resultCode'], result['resultText'], result['cacheDirty'], result['cacheMTime'] = 0, 'Record OK', cache_dirty, mtime
    record_order_subfields(record)
    return record
def perform_request_update_record(request_type, recid, uid, cacheMTime, data, changeApplied, isBulk=False):
    """Handle record update requests like adding, modifying, moving or deleting
    of fields or subfields. Possible common error situations:
    - Missing cache file
    - Cache file modified in other editor
    """

    response = {}

    if not cache_exists(recid, uid):
        response['resultCode'] = 106
    elif get_cache_mtime(recid, uid) != cacheMTime and not isBulk:
        # In case of a bulk request, the changes are deliberately performed
        # immediately one after another
        response['resultCode'] = 107
    else:
        try:
            record_revision, record, pending_changes, desactivated_hp_changes = get_cache_file_contents(recid, uid)[1:]
        except:
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['wrong_cache_file_format']
            return response

        if changeApplied != -1:
            pending_changes = pending_changes[:changeApplied] + pending_changes[changeApplied+1:]

        field_position_local = data.get('fieldPosition')
        if field_position_local is not None:
            field_position_local = int(field_position_local)
        if request_type == 'overrideChangesList':
            pending_changes = data['newChanges']
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['editor_modifications_changed']
        elif request_type == 'removeChange':
            #the change is removed automatically by passing the changeApplied parameter
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['editor_modifications_changed']
        elif request_type == 'desactivateHoldingPenChangeset':
            # the changeset has been marked as processed ( user applied it in the editor)
            # marking as used in the cache file
            # CAUTION: This function has been implemented here because logically it fits
            #          with the modifications made to the cache file. No changes are made to the
            #          Holding Pen physically. The changesets are related to the cache because
            #          we want to cancel the removal every time the cache disappears for any reason
            desactivated_hp_changes[data.get('desactivatedChangeset')] = True
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['disabled_hp_changeset']
        elif request_type == 'addField':
            if data['controlfield']:
                record_add_field(record, data['tag'],
                                 controlfield_value=data['value'])
                response['resultCode'] = 20
            else:
                record_add_field(record, data['tag'], data['ind1'],
                                 data['ind2'], subfields=data['subfields'],
                                 field_position_local=field_position_local)
                response['resultCode'] = 21

        elif request_type == 'addSubfields':
            subfields = data['subfields']
            for subfield in subfields:
                record_add_subfield_into(record, data['tag'], subfield[0],
                    subfield[1], subfield_position=None,
                    field_position_local=field_position_local)
            if len(subfields) == 1:
                response['resultCode'] = 22
            else:
                response['resultCode'] = 23
        elif request_type == 'modifyField': # changing the field structure
            # first remove subfields and then add new... change the indices
            subfields = data['subFields'] # parse the JSON representation of the subfields here

            new_field = create_field(subfields, data['ind1'], data['ind2'])
            record_replace_field(record, data['tag'], new_field,
                                 field_position_local=data['fieldPosition'])
            response['resultCode'] = 26

        elif request_type == 'modifyContent':
            if data['subfieldIndex'] != None:
                record_modify_subfield(record, data['tag'],
                    data['subfieldCode'], data['value'],
                    int(data['subfieldIndex']),
                    field_position_local=field_position_local)
            else:
                record_modify_controlfield(record, data['tag'], data["value"],
                  field_position_local=field_position_local)
            response['resultCode'] = 24

        elif request_type == 'moveSubfield':
            record_move_subfield(record, data['tag'],
                int(data['subfieldIndex']), int(data['newSubfieldIndex']),
                field_position_local=field_position_local)
            response['resultCode'] = 25

        elif request_type == 'moveField':
            if data['direction'] == 'up':
                final_position_local = field_position_local-1
            else: # direction is 'down'
                final_position_local = field_position_local+1
            record_move_fields(record, data['tag'], [field_position_local],
                final_position_local)
            response['resultCode'] = 32

        elif request_type == 'deleteFields':
            to_delete = data['toDelete']
            deleted_fields = 0
            deleted_subfields = 0
            for tag in to_delete:
                # Sort the fields in decreasing order of local position.
                fieldsOrder = to_delete[tag].keys()
                fieldsOrder.sort(key=int, reverse=True)
                for field_position_local in fieldsOrder:
                    if not to_delete[tag][field_position_local]:
                        # No subfields specified - delete entire field.
                        record_delete_field(record, tag,
                            field_position_local=int(field_position_local))
                        deleted_fields += 1
                    else:
                        for subfield_position in \
                                to_delete[tag][field_position_local][::-1]:
                            # Delete subfields in reverse order (to keep the
                            # indexing correct).
                            record_delete_subfield_from(record, tag,
                                int(subfield_position),
                                field_position_local=int(field_position_local))
                            deleted_subfields += 1
            if deleted_fields == 1 and deleted_subfields == 0:
                response['resultCode'] = 26
            elif deleted_fields and deleted_subfields == 0:
                response['resultCode'] = 27
            elif deleted_subfields == 1 and deleted_fields == 0:
                response['resultCode'] = 28
            elif deleted_subfields and deleted_fields == 0:
                response['resultCode'] = 29
            else:
                response['resultCode'] = 30
        response['cacheMTime'] = update_cache_file_contents(
            recid, uid, record_revision, record,
            pending_changes, desactivated_hp_changes)
        response['cacheDirty'] = True

    return response
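A hedged dispatch sketch for the handler above. The payload keys mirror the ones it reads; the concrete tag and values are illustrative, and cacheMTime would normally echo the value returned by the previous request.

# Illustrative 'addSubfields' request: append a $a subfield to the second
# '100' field of the cached record. changeApplied=-1 means no pending
# Holding Pen change is removed by this request.
data = {
    'tag': '100',
    'fieldPosition': 1,                 # local position among the '100' fields
    'subfields': [('a', 'Doe, John')],  # (subfield code, value) pairs
}
response = perform_request_update_record('addSubfields', recid, uid,
                                         cacheMTime, data, changeApplied=-1)
if response['resultCode'] == 107:
    # the cache was modified in another editor; the client should reload
    pass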
def perform_request_record(req, request_type, recid, uid, data):
    """Handle 'major' record related requests like fetching, submitting or
    deleting a record, cancel editing or preparing a record for merging.

    """
    response = {}

    if request_type == 'newRecord':
        # Create a new record.
        new_recid = reserve_record_id()
        new_type = data['newType']
        if new_type == 'empty':
            # Create a new empty record.
            create_cache_file(recid, uid)
            response['resultCode'], response['newRecID'] = 6, new_recid

        elif new_type == 'template':
            # Create a new record from XML record template.
            template_filename = data['templateFilename']
            template = get_record_template(template_filename)
            if not template:
                response['resultCode']  = 108
            else:
                record = create_record(template)[0]
                if not record:
                    response['resultCode']  = 109
                else:
                    record_add_field(record, '001',
                                     controlfield_value=str(new_recid))
                    create_cache_file(new_recid, uid, record, True)
                    response['resultCode'], response['newRecID']  = 7, new_recid

        elif new_type == 'clone':
            # Clone an existing record (from the user's cache).
            existing_cache = cache_exists(recid, uid)
            if existing_cache:
                try:
                    record = get_cache_file_contents(recid, uid)[2]
                except:
                    # if, for example, the cache format was wrong (outdated)
                    record = get_bibrecord(recid)
            else:
                # Cache missing. Fall back to using original version.
                record = get_bibrecord(recid)
            record_delete_field(record, '001')
            record_add_field(record, '001', controlfield_value=str(new_recid))
            create_cache_file(new_recid, uid, record, True)
            response['resultCode'], response['newRecID'] = 8, new_recid
    elif request_type == 'getRecord':
        # Fetch the record. Possible error situations:
        # - Non-existing record
        # - Deleted record
        # - Record locked by other user
        # - Record locked by queue
        # A cache file will be created if it does not exist.
        # If the cache is outdated (i.e., not based on the latest DB revision),
        # cacheOutdated will be set to True in the response.
        record_status = record_exists(recid)
        existing_cache = cache_exists(recid, uid)
        read_only_mode = False
        if data.has_key("inReadOnlyMode"):
            read_only_mode = data['inReadOnlyMode']

        if record_status == 0:
            response['resultCode'] = 102
        elif record_status == -1:
            response['resultCode'] = 103
        elif not read_only_mode and not existing_cache and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif not read_only_mode and existing_cache and \
                cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif not read_only_mode and record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            if data.get('deleteRecordCache'):
                delete_cache_file(recid, uid)
                existing_cache = False
                pending_changes = []
                disabled_hp_changes = {}
            if read_only_mode:
                if data.has_key('recordRevision'):
                    record_revision_ts = data['recordRevision']
                    record_xml = get_marcxml_of_revision(recid, record_revision_ts)
                    record = create_record(record_xml)[0]
                    record_revision = timestamp_to_revision(record_revision_ts)
                    pending_changes = []
                    disabled_hp_changes = {}
                else:
                    # a normal cacheless retrieval of a record
                    record = get_bibrecord(recid)
                    record_revision = get_record_last_modification_date(recid)
                    pending_changes = []
                    disabled_hp_changes = {}
                cache_dirty = False
                mtime = 0
            elif not existing_cache:
                record_revision, record = create_cache_file(recid, uid)
                mtime = get_cache_mtime(recid, uid)
                pending_changes = []
                disabled_hp_changes = {}
                cache_dirty = False
            else:
                try:
                    cache_dirty, record_revision, record, pending_changes, \
                        disabled_hp_changes = get_cache_file_contents(recid, uid)
                    touch_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    if not latest_record_revision(recid, record_revision):
                        response['cacheOutdated'] = True
                except:
                    record_revision, record = create_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    pending_changes = []
                    disabled_hp_changes = {}
                    cache_dirty = False

            if data['clonedRecord']:
                response['resultCode'] = 9
            else:
                response['resultCode'] = 3

            revision_author = get_record_revision_author(recid, record_revision)
            last_revision_ts = revision_to_timestamp(get_record_last_modification_date(recid))
            revisions_history = get_record_revision_timestamps(recid)

            response['cacheDirty'] = cache_dirty
            response['record'] = record
            response['cacheMTime'] = mtime
            response['recordRevision'] = revision_to_timestamp(record_revision)
            response['revisionAuthor'] = revision_author
            response['lastRevision'] = last_revision_ts
            response['revisionsHistory'] = revisions_history
            response['inReadOnlyMode'] = read_only_mode
            response['pendingHpChanges'] = pending_changes
            response['disabledHpChanges'] = disabled_hp_changes
            # Set tag format from user's session settings.
            try:
                tagformat_settings = session_param_get(req, 'bibedit_tagformat')
                tagformat = tagformat_settings[recid]
            except KeyError:
                tagformat = CFG_BIBEDIT_TAG_FORMAT
            response['tagFormat'] = tagformat

    elif request_type == 'submit':
        # Submit the record. Possible error situations:
        # - Missing cache file
        # - Cache file modified in other editor
        # - Record locked by other user
        # - Record locked by queue
        # - Invalid XML characters
        # If the cache is outdated cacheOutdated will be set to True in the
        # response.
        if not cache_exists(recid, uid):
            response['resultCode'] = 106
        elif not get_cache_mtime(recid, uid) == data['cacheMTime']:
            response['resultCode'] = 107
        elif cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            try:
                record_revision, record, pending_changes, disabled_changes = get_cache_file_contents(recid, uid)[1:]
                xml_record = print_rec(record)
                record, status_code, list_of_errors = create_record(xml_record)
                if status_code == 0:
                    response['resultCode'], response['errors'] = 110, \
                        list_of_errors
                elif not data['force'] and \
                        not latest_record_revision(recid, record_revision):
                    response['cacheOutdated'] = True
                else:
                    save_xml_record(recid, uid)
                    response['resultCode'] = 4
            except:
                response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV['wrong_cache_file_format']
    elif request_type == 'revert':
        revId = data['revId']
        job_date = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(revId).groups()
        revision_xml = get_marcxml_of_revision(recid, job_date)
        save_xml_record(recid, uid, revision_xml)
        if (cache_exists(recid, uid)):
            delete_cache_file(recid, uid)
        response['resultCode'] = 4

    elif request_type == 'cancel':
        # Cancel editing by deleting the cache file. Possible error situations:
        # - Cache file modified in other editor
        if cache_exists(recid, uid):
            if get_cache_mtime(recid, uid) == data['cacheMTime']:
                delete_cache_file(recid, uid)
                response['resultCode'] = 5
            else:
                response['resultCode'] = 107
        else:
            response['resultCode'] = 5

    elif request_type == 'deleteRecord':
        # Delete the record. Possible error situations:
        # - Record locked by other user
        # - Record locked by queue
        # As the user is requesting deletion we proceed even if the cache file
        # is missing and we don't check if the cache is outdated or has
        # been modified in another editor.
        existing_cache = cache_exists(recid, uid)
        pending_changes = []
        if existing_cache and cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            if not existing_cache:
                record_revision, record, pending_changes, desactivated_hp_changes = create_cache_file(recid, uid)
            else:
                try:
                    record_revision, record, pending_changes, desactivated_hp_changes = get_cache_file_contents(
                        recid, uid)[1:]
                except:
                    record_revision, record, pending_changes, desactivated_hp_changes = create_cache_file(recid, uid)
            record_add_field(record, '980', ' ', ' ', '', [('c', 'DELETED')])
            update_cache_file_contents(recid, uid, record_revision, record, pending_changes, desactivated_hp_changes)
            save_xml_record(recid, uid)
            delete_related_holdingpen_changes(recid) # we don't need any changes related to a deleted record
            response['resultCode'] = 10

    elif request_type == 'deleteRecordCache':
        # Delete the cache file. Ignore the request if the cache has been
        # modified in another editor.
        if cache_exists(recid, uid) and get_cache_mtime(recid, uid) == \
                data['cacheMTime']:
            delete_cache_file(recid, uid)
        response['resultCode'] = 11

    elif request_type == 'prepareRecordMerge':
        # We want to merge the cache with the current DB version of the record,
        # so prepare an XML file from the file cache, to be used by BibMerge.
        # Possible error situations:
        # - Missing cache file
        # - Record locked by other user
        # - Record locked by queue
        # We don't check if cache is outdated (a likely scenario for this
        # request) or if it has been modified in another editor.
        if not cache_exists(recid, uid):
            response['resultCode'] = 106
        elif cache_expired(recid, uid) and \
                record_locked_by_other_user(recid, uid):
            response['resultCode'] = 104
        elif record_locked_by_queue(recid):
            response['resultCode'] = 105
        else:
            save_xml_record(recid, uid, to_upload=False, to_merge=True)
            response['resultCode'] = 12

    return response
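A hedged round-trip sketch for the 'getRecord' branch above; req is the editor's request object, the data keys mirror the ones the handler reads, and the result codes are the ones set above.

# Illustrative 'getRecord' call; req, recid and uid are assumed to come from
# the editor session.
data = {'inReadOnlyMode': False, 'clonedRecord': False}
response = perform_request_record(req, 'getRecord', recid, uid, data)
if response['resultCode'] in (3, 9):
    record = response['record']
    cache_mtime = response['cacheMTime']  # echoed back on 'submit'/'cancel'
elif response['resultCode'] in (102, 103, 104, 105):
    # non-existent, deleted or locked record
    pass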
Example #6
def perform_request_update_record(request_type, recid, uid, cacheMTime, data, \
                                  hpChanges, undoRedoOp, isBulk=False):
    """Handle record update requests like adding, modifying, moving or deleting
    of fields or subfields. Possible common error situations:
    - Missing cache file
    - Cache file modified in other editor
    Explanation of some parameters:
       undoRedoOp - Indicates whether the current request performs an "undo",
                    a "redo", or a regular operation (in which case it carries
                    the operation's undo descriptor).
    """

    response = {}

    if not cache_exists(recid, uid):
        response['resultCode'] = 106
    elif get_cache_mtime(recid, uid) != cacheMTime and not isBulk:
        # In case of a bulk request, the changes are deliberately performed
        # immediately one after another
        response['resultCode'] = 107
    else:
        try:
            record_revision, record, pending_changes, deactivated_hp_changes, \
                undo_list, redo_list = get_cache_file_contents(recid, uid)[1:]
        except:
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'wrong_cache_file_format']
            return response

        # process all the Holding Pen change operations, regardless of the
        # request type
        if hpChanges.has_key("toDisable"):
            for changeId in hpChanges["toDisable"]:
                pending_changes[changeId]["applied_change"] = True

        if hpChanges.has_key("toEnable"):
            for changeId in hpChanges["toEnable"]:
                pending_changes[changeId]["applied_change"] = False

        if hpChanges.has_key("toOverride"):
            pending_changes = hpChanges["toOverride"]

        if hpChanges.has_key("changesetsToDeactivate"):
            for changesetId in hpChanges["changesetsToDeactivate"]:
                deactivated_hp_changes[changesetId] = True

        if hpChanges.has_key("changesetsToActivate"):
            for changesetId in hpChanges["changesetsToActivate"]:
                deactivated_hp_changes[changesetId] = False

        # processing the undo/redo entries
        if undoRedoOp == "undo":
            try:
                redo_list = [undo_list[-1]] + redo_list
                undo_list = undo_list[:-1]
            except:
                raise Exception("An exception occured when undoing previous" + \
                                " operation. Undo list: " + str(undo_list) + \
                                " Redo list " + str(redo_list))
        elif undoRedoOp == "redo":
            try:
                undo_list = undo_list + [redo_list[0]]
                redo_list = redo_list[1:]
            except:
                raise Exception("An exception occured when redoing previous" + \
                                " operation. Undo list: " + str(undo_list) + \
                                " Redo list " + str(redo_list))
        else:
            # This is a genuine operation - we have to add a new descriptor
            # to the undo list and cancel the redo unless the operation is
            # a bulk operation
            if undoRedoOp != None:
                undo_list = undo_list + [undoRedoOp]
                redo_list = []
            else:
                assert isBulk == True

        field_position_local = data.get('fieldPosition')
        if field_position_local is not None:
            field_position_local = int(field_position_local)
        if request_type == 'otherUpdateRequest':
            # An empty request. Might be useful if we want to perform
            # operations that require only the actions performed globally,
            # like modifying the holdingPen changes list
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'editor_modifications_changed']
        elif request_type == 'deactivateHoldingPenChangeset':
            # the changeset has been marked as processed ( user applied it in
            # the editor). Marking as used in the cache file.
            # CAUTION: This function has been implemented here because logically
            #          it fits with the modifications made to the cache file.
            #          No changes are made to the Holding Pen physically. The
            #          changesets are related to the cache because we want to
            #          cancel the removal every time the cache disappears for
            #          any reason
            response['resultCode'] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV[ \
                'disabled_hp_changeset']
        elif request_type == 'addField':
            if data['controlfield']:
                record_add_field(record, data['tag'],
                                 controlfield_value=data['value'])
                response['resultCode'] = 20
            else:
                record_add_field(record, data['tag'], data['ind1'],
                                 data['ind2'], subfields=data['subfields'],
                                 field_position_local=field_position_local)
                response['resultCode'] = 21

        elif request_type == 'addSubfields':
            subfields = data['subfields']
            for subfield in subfields:
                record_add_subfield_into(record, data['tag'], subfield[0],
                    subfield[1], subfield_position=None,
                    field_position_local=field_position_local)
            if len(subfields) == 1:
                response['resultCode'] = 22
            else:
                response['resultCode'] = 23
        elif request_type == 'addFieldsSubfieldsOnPositions':
            #1) Sorting the fields by their identifiers
            fieldsToAdd = data['fieldsToAdd']
            subfieldsToAdd = data['subfieldsToAdd']
            for tag in fieldsToAdd.keys():
                positions = fieldsToAdd[tag].keys()
                positions.sort()
                for position in positions:
                    # now adding fields at a position

                    isControlfield = (len(fieldsToAdd[tag][position][0]) == 0)
                    # if there are no subfields, this is a control field
                    if isControlfield:
                        controlfieldValue = fieldsToAdd[tag][position][3]
                        record_add_field(record, tag,
                                         field_position_local=int(position),
                                         controlfield_value=controlfieldValue)
                    else:
                        subfields = fieldsToAdd[tag][position][0]
                        ind1 = fieldsToAdd[tag][position][1]
                        ind2 = fieldsToAdd[tag][position][2]
                        record_add_field(record, tag, ind1, ind2,
                                         subfields=subfields,
                                         field_position_local=int(position))
            # now adding the subfields
            for tag in subfieldsToAdd.keys():
                # the order of the fields is not important here
                for fieldPosition in subfieldsToAdd[tag].keys():
                    subfieldsPositions = subfieldsToAdd[tag][fieldPosition].keys()
                    subfieldsPositions.sort()
                    for subfieldPosition in subfieldsPositions:
                        subfield = subfieldsToAdd[tag][fieldPosition][subfieldPosition]
                        record_add_subfield_into(record, tag, subfield[0],
                                                 subfield[1],
                                                 subfield_position=int(subfieldPosition),
                                                 field_position_local=int(fieldPosition))

            response['resultCode'] = \
                CFG_BIBEDIT_AJAX_RESULT_CODES_REV['added_positioned_subfields']

        elif request_type == 'modifyField': # changing the field structure
            # first remove subfields and then add new... change the indices
            subfields = data['subFields'] # parse the JSON representation of
                                          # the subfields here

            new_field = create_field(subfields, data['ind1'], data['ind2'])
            record_replace_field(record, data['tag'], new_field,
                                 field_position_local=data['fieldPosition'])
            response['resultCode'] = 26

        elif request_type == 'modifyContent':
            if data['subfieldIndex'] != None:
                record_modify_subfield(record, data['tag'],
                    data['subfieldCode'], data['value'],
                    int(data['subfieldIndex']),
                    field_position_local=field_position_local)
            else:
                record_modify_controlfield(record, data['tag'], data["value"],
                  field_position_local=field_position_local)
            response['resultCode'] = 24

        elif request_type == 'moveSubfield':
            record_move_subfield(record, data['tag'],
                int(data['subfieldIndex']), int(data['newSubfieldIndex']),
                field_position_local=field_position_local)
            response['resultCode'] = 25

        elif request_type == 'moveField':
            if data['direction'] == 'up':
                final_position_local = field_position_local-1
            else: # direction is 'down'
                final_position_local = field_position_local+1
            record_move_fields(record, data['tag'], [field_position_local],
                final_position_local)
            response['resultCode'] = 32

        elif request_type == 'deleteFields':
            to_delete = data['toDelete']
            deleted_fields = 0
            deleted_subfields = 0
            for tag in to_delete:
                # Sort the fields in decreasing order of local position.
                fieldsOrder = to_delete[tag].keys()
                fieldsOrder.sort(key=int, reverse=True)
                for field_position_local in fieldsOrder:
                    if not to_delete[tag][field_position_local]:
                        # No subfields specified - delete entire field.
                        record_delete_field(record, tag,
                            field_position_local=int(field_position_local))
                        deleted_fields += 1
                    else:
                        for subfield_position in \
                                to_delete[tag][field_position_local][::-1]:
                            # Delete subfields in reverse order (to keep the
                            # indexing correct).
                            record_delete_subfield_from(record, tag,
                                int(subfield_position),
                                field_position_local=int(field_position_local))
                            deleted_subfields += 1
            if deleted_fields == 1 and deleted_subfields == 0:
                response['resultCode'] = 26
            elif deleted_fields and deleted_subfields == 0:
                response['resultCode'] = 27
            elif deleted_subfields == 1 and deleted_fields == 0:
                response['resultCode'] = 28
            elif deleted_subfields and deleted_fields == 0:
                response['resultCode'] = 29
            else:
                response['resultCode'] = 30
        response['cacheMTime'] = update_cache_file_contents(
            recid, uid, record_revision, record, pending_changes,
            deactivated_hp_changes, undo_list, redo_list)
        response['cacheDirty'] = True

    return response
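A hedged sketch of a genuine (non-undo) edit through the extended signature above. The shape of the undo descriptor is assumed, since only the handler's bookkeeping of it is visible here, and no Holding Pen changes are touched.

# Illustrative 'addField' request with undo/redo bookkeeping. The descriptor
# format is assumed; the handler only appends it to the cached undo list.
undo_descriptor = {'operation': 'addField', 'tag': '500'}
data = {'tag': '500', 'controlfield': False, 'ind1': ' ', 'ind2': ' ',
        'subfields': [('a', 'General note')]}
response = perform_request_update_record('addField', recid, uid, cacheMTime,
                                         data, hpChanges={},
                                         undoRedoOp=undo_descriptor,
                                         isBulk=False)
# resultCode 21 means the datafield was added; 106/107 signal cache problems.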
Example #7
def perform_request_record(req, request_type, recid, uid, data, ln=CFG_SITE_LANG):
    """Handle 'major' record related requests like fetching, submitting or
    deleting a record, cancel editing or preparing a record for merging.

    """
    response = {}

    if request_type == "newRecord":
        # Create a new record.
        new_recid = reserve_record_id()
        new_type = data["newType"]
        if new_type == "empty":
            # Create a new empty record.
            create_cache_file(recid, uid)
            response["resultCode"], response["newRecID"] = 6, new_recid

        elif new_type == "template":
            # Create a new record from XML record template.
            template_filename = data["templateFilename"]
            template = get_record_template(template_filename)
            if not template:
                response["resultCode"] = 108
            else:
                record = create_record(template)[0]
                if not record:
                    response["resultCode"] = 109
                else:
                    record_add_field(record, "001", controlfield_value=str(new_recid))
                    create_cache_file(new_recid, uid, record, True)
                    response["resultCode"], response["newRecID"] = 7, new_recid

        elif new_type == "clone":
            # Clone an existing record (from the user's cache).
            existing_cache = cache_exists(recid, uid)
            if existing_cache:
                try:
                    record = get_cache_file_contents(recid, uid)[2]
                except:
                    # if, for example, the cache format was wrong (outdated)
                    record = get_bibrecord(recid)
            else:
                # Cache missing. Fall back to using original version.
                record = get_bibrecord(recid)
            record_delete_field(record, "001")
            record_add_field(record, "001", controlfield_value=str(new_recid))
            create_cache_file(new_recid, uid, record, True)
            response["resultCode"], response["newRecID"] = 8, new_recid
    elif request_type == "getRecord":
        # Fetch the record. Possible error situations:
        # - Non-existing record
        # - Deleted record
        # - Record locked by other user
        # - Record locked by queue
        # A cache file will be created if it does not exist.
        # If the cache is outdated (i.e., not based on the latest DB revision),
        # cacheOutdated will be set to True in the response.
        record_status = record_exists(recid)
        existing_cache = cache_exists(recid, uid)
        read_only_mode = False

        if data.has_key("inReadOnlyMode"):
            read_only_mode = data["inReadOnlyMode"]

        if record_status == 0:
            response["resultCode"] = 102
        elif record_status == -1:
            response["resultCode"] = 103
        elif not read_only_mode and not existing_cache and record_locked_by_other_user(recid, uid):
            response["resultCode"] = 104
        elif (
            not read_only_mode
            and existing_cache
            and cache_expired(recid, uid)
            and record_locked_by_other_user(recid, uid)
        ):
            response["resultCode"] = 104
        elif not read_only_mode and record_locked_by_queue(recid):
            response["resultCode"] = 105
        else:
            if data.get("deleteRecordCache"):
                delete_cache_file(recid, uid)
                existing_cache = False
                pending_changes = []
                disabled_hp_changes = {}
            if read_only_mode:
                if data.has_key("recordRevision"):
                    record_revision_ts = data["recordRevision"]
                    record_xml = get_marcxml_of_revision(recid, record_revision_ts)
                    record = create_record(record_xml)[0]
                    record_revision = timestamp_to_revision(record_revision_ts)
                    pending_changes = []
                    disabled_hp_changes = {}
                else:
                    # a normal cacheless retrieval of a record
                    record = get_bibrecord(recid)
                    record_revision = get_record_last_modification_date(recid)
                    pending_changes = []
                    disabled_hp_changes = {}
                cache_dirty = False
                mtime = 0
                undo_list = []
                redo_list = []
            elif not existing_cache:
                record_revision, record = create_cache_file(recid, uid)
                mtime = get_cache_mtime(recid, uid)
                pending_changes = []
                disabled_hp_changes = {}
                undo_list = []
                redo_list = []
                cache_dirty = False
            else:
                # TODO: This try except should be replaced with something nicer,
                #      like an argument indicating if a new cache file is to
                #      be created
                try:
                    cache_dirty, record_revision, record, pending_changes, disabled_hp_changes, undo_list, redo_list = get_cache_file_contents(
                        recid, uid
                    )
                    touch_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    if not latest_record_revision(recid, record_revision) and get_record_revisions(recid) != ():
                        # This should prevent using an old cache when viewing
                        # an old version. If there are no revisions, we should
                        # skip this step because this is a new record.
                        response["cacheOutdated"] = True

                except:
                    record_revision, record = create_cache_file(recid, uid)
                    mtime = get_cache_mtime(recid, uid)
                    pending_changes = []
                    disabled_hp_changes = {}
                    cache_dirty = False
                    undo_list = []
                    redo_list = []
            if data["clonedRecord"]:
                response["resultCode"] = 9
            else:
                response["resultCode"] = 3
            revision_author = get_record_revision_author(recid, record_revision)
            last_revision_ts = revision_to_timestamp(get_record_last_modification_date(recid))
            revisions_history = get_record_revision_timestamps(recid)
            number_of_physical_copies = get_number_copies(recid)
            bibcirc_details_URL = create_item_details_url(recid, ln)
            can_have_copies = can_record_have_physical_copies(recid)

            response["cacheDirty"], response["record"], response["cacheMTime"], response["recordRevision"], response[
                "revisionAuthor"
            ], response["lastRevision"], response["revisionsHistory"], response["inReadOnlyMode"], response[
                "pendingHpChanges"
            ], response[
                "disabledHpChanges"
            ], response[
                "undoList"
            ], response[
                "redoList"
            ] = (
                cache_dirty,
                record,
                mtime,
                revision_to_timestamp(record_revision),
                revision_author,
                last_revision_ts,
                revisions_history,
                read_only_mode,
                pending_changes,
                disabled_hp_changes,
                undo_list,
                redo_list,
            )
            response["numberOfCopies"] = number_of_physical_copies
            response["bibCirculationUrl"] = bibcirc_details_URL
            response["canRecordHavePhysicalCopies"] = can_have_copies
            # Set tag format from user's session settings.
            try:
                tagformat_settings = session_param_get(req, "bibedit_tagformat")
                tagformat = tagformat_settings[recid]
            except KeyError:
                tagformat = CFG_BIBEDIT_TAG_FORMAT
            response["tagFormat"] = tagformat

    elif request_type == "submit":
        # Submit the record. Possible error situations:
        # - Missing cache file
        # - Cache file modified in other editor
        # - Record locked by other user
        # - Record locked by queue
        # - Invalid XML characters
        # If the cache is outdated cacheOutdated will be set to True in the
        # response.
        if not cache_exists(recid, uid):
            response["resultCode"] = 106
        elif not get_cache_mtime(recid, uid) == data["cacheMTime"]:
            response["resultCode"] = 107
        elif cache_expired(recid, uid) and record_locked_by_other_user(recid, uid):
            response["resultCode"] = 104
        elif record_locked_by_queue(recid):
            response["resultCode"] = 105
        else:
            try:
                tmp_result = get_cache_file_contents(recid, uid)
                record_revision = tmp_result[1]
                record = tmp_result[2]
                pending_changes = tmp_result[3]
                #                disabled_changes = tmp_result[4]

                xml_record = print_rec(record)
                record, status_code, list_of_errors = create_record(xml_record)
                if status_code == 0:
                    response["resultCode"], response["errors"] = 110, list_of_errors
                elif not data["force"] and not latest_record_revision(recid, record_revision):
                    response["cacheOutdated"] = True
                else:
                    save_xml_record(recid, uid)
                    response["resultCode"] = 4
            except:
                response["resultCode"] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV["error_wrong_cache_file_format"]
    elif request_type == "revert":
        revId = data["revId"]
        job_date = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(revId).groups()
        revision_xml = get_marcxml_of_revision(recid, job_date)
        save_xml_record(recid, uid, revision_xml)
        if cache_exists(recid, uid):
            delete_cache_file(recid, uid)
        response["resultCode"] = 4

    elif request_type == "cancel":
        # Cancel editing by deleting the cache file. Possible error situations:
        # - Cache file modified in other editor
        if cache_exists(recid, uid):
            if get_cache_mtime(recid, uid) == data["cacheMTime"]:
                delete_cache_file(recid, uid)
                response["resultCode"] = 5
            else:
                response["resultCode"] = 107
        else:
            response["resultCode"] = 5

    elif request_type == "deleteRecord":
        # Delete the record. Possible error situations:
        # - Record locked by other user
        # - Record locked by queue
        # As the user is requesting deletion we proceed even if the cache file
        # is missing and we don't check if the cache is outdated or has
        # been modified in another editor.
        existing_cache = cache_exists(recid, uid)
        pending_changes = []

        if has_copies(recid):
            response["resultCode"] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV["error_physical_copies_exist"]
        elif existing_cache and cache_expired(recid, uid) and record_locked_by_other_user(recid, uid):
            response["resultCode"] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV["error_rec_locked_by_user"]
        elif record_locked_by_queue(recid):
            response["resultCode"] = CFG_BIBEDIT_AJAX_RESULT_CODES_REV["error_rec_locked_by_queue"]
        else:
            if not existing_cache:
                record_revision, record, pending_changes, deactivated_hp_changes, undo_list, redo_list = create_cache_file(
                    recid, uid
                )
            else:
                try:
                    (record_revision, record, pending_changes, deactivated_hp_changes,
                     undo_list, redo_list) = get_cache_file_contents(recid, uid)[1:]
                except:
                    record_revision, record, pending_changes, deactivated_hp_changes = create_cache_file(recid, uid)
            record_add_field(record, "980", " ", " ", "", [("c", "DELETED")])
            undo_list = []
            redo_list = []
            update_cache_file_contents(
                recid, uid, record_revision, record, pending_changes, deactivated_hp_changes, undo_list, redo_list
            )
            save_xml_record(recid, uid)
            # we don't need any changes related to a deleted record
            delete_related_holdingpen_changes(recid)
            response["resultCode"] = 10

    elif request_type == "deleteRecordCache":
        # Delete the cache file. Ignore the request if the cache has been
        # modified in another editor.
        if cache_exists(recid, uid) and get_cache_mtime(recid, uid) == data["cacheMTime"]:
            delete_cache_file(recid, uid)
        response["resultCode"] = 11

    elif request_type == "prepareRecordMerge":
        # We want to merge the cache with the current DB version of the record,
        # so prepare an XML file from the file cache, to be used by BibMerge.
        # Possible error situations:
        # - Missing cache file
        # - Record locked by other user
        # - Record locked by queue
        # We don't check if cache is outdated (a likely scenario for this
        # request) or if it has been modified in another editor.
        if not cache_exists(recid, uid):
            response["resultCode"] = 106
        elif cache_expired(recid, uid) and record_locked_by_other_user(recid, uid):
            response["resultCode"] = 104
        elif record_locked_by_queue(recid):
            response["resultCode"] = 105
        else:
            save_xml_record(recid, uid, to_upload=False, to_merge=True)
            response["resultCode"] = 12

    return response
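A hedged sketch of the submit step against the handler above. cacheMTime is assumed to echo the value returned by the earlier 'getRecord' (or by the latest update request), otherwise the handler answers 107.

# Illustrative 'submit' call after editing.
data = {"cacheMTime": cache_mtime, "force": False}
response = perform_request_record(req, "submit", recid, uid, data)
if response.get("cacheOutdated"):
    # the record changed in the database since the cache was created;
    # the client may resubmit with force=True after reviewing the changes
    pass
elif response["resultCode"] == 4:
    pass  # record saved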