def update(dry_run=False):
    created_dict = {'osf': []}
    for hier in BEPRESS_CHANGES['create']:
        new_text = hier.split(':')[-1]
        bepress_parent = BEPRESS_PROVIDER.subjects.get(text=hier.split(':')[-2])
        logger.info('Creating osf - {}'.format(new_text))
        bepress_subject = Subject.objects.create(parent=bepress_parent, provider=BEPRESS_PROVIDER, text=new_text)
        created_dict['osf'].append(new_text)
        for custom_parent in bepress_parent.aliases.all():
            if not bepress_parent.children.count() > 1 or (
                    custom_parent.children.exists() and
                    set(bepress_parent.children.exclude(text=new_text).values_list('text', flat=True)).issubset(set(custom_parent.children.values_list('text', flat=True)))):
                # Either children were included in the custom taxonomy or they didn't exist before, probably
                logger.info('Creating {} - {}'.format(custom_parent.provider._id, new_text))
                Subject.objects.create(parent=custom_parent, provider=custom_parent.provider, text=new_text, bepress_subject=bepress_subject)
                if custom_parent.provider._id not in created_dict:
                    created_dict[custom_parent.provider._id] = []
                created_dict[custom_parent.provider._id].append(new_text)
    for old, new in BEPRESS_CHANGES['rename'].items():
        logger.info('Renaming `{}`->`{}`'.format(old, new))
        to_update = Subject.objects.filter(text=old)
        affected_preprints = set(to_update.exclude(preprints__isnull=True).values_list('preprints__guids___id', flat=True))
        to_update.update(text=new)
        for preprint_id in affected_preprints:
            logger.info('Notifying SHARE about preprint {} change'.format(preprint_id))
            if not dry_run:
                on_preprint_updated(preprint_id)
    for provider_id, list_of_subjs in created_dict.items():
        logger.info('Created {} new subjects on {}: "{}"'.format(len(list_of_subjs), provider_id, ', '.join(list_of_subjs)))
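The shape of BEPRESS_CHANGES is implied by how update() consumes it: colon-delimited subject hierarchies under 'create' and an old-text-to-new-text mapping under 'rename'. A minimal illustrative sketch, with placeholder subject names rather than real taxonomy entries:

BEPRESS_CHANGES = {
    'create': [
        # split on ':' in update(); [-2] is the direct parent text, [-1] the new child text
        'Hypothetical Parent Subject:Hypothetical New Child Subject',
    ],
    'rename': {
        # old subject text -> new subject text
        'Hypothetical Old Subject Text': 'Hypothetical New Subject Text',
    },
}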
    def test_send_to_share_is_true(self, mock_share, preprint):
        on_preprint_updated(preprint._id)

        data = json.loads(mock_share.calls[-1].request.body.decode())
        assert data['data']['attributes']['data']['@graph']
        assert mock_share.calls[-1].request.headers[
            'Authorization'] == 'Bearer Snowmobiling'
def migrate(dry=True):
    assert settings.SHARE_URL, 'SHARE_URL must be set to migrate.'
    assert settings.SHARE_API_TOKEN, 'SHARE_API_TOKEN must be set to migrate.'
    targets = get_targets()
    target_count = len(targets)
    successes = []
    failures = []
    count = 0

    logger.info('Preparing to migrate {} preprints.'.format(target_count))
    for preprint_id in targets:
        count += 1
        logger.info('{}/{} - {}'.format(count, target_count, preprint_id))
        try:
            if not dry:
                on_preprint_updated(preprint_id, update_share=True)
                # Sleep in order to be nice to EZID
                time.sleep(1)
        except Exception as e:
            # TODO: This reliably fails for certain nodes with
            # IncompleteRead(0 bytes read)
            failures.append(preprint_id)
            logger.warn('Encountered exception {} while posting to SHARE for preprint {}'.format(e, preprint_id))
        else:
            successes.append(preprint_id)

    logger.info('Successes: {}'.format(successes))
    logger.info('Failures: {}'.format(failures))
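A hypothetical command-line entry point for a script like this; the flag name and logging setup are assumptions, not part of the original:

if __name__ == '__main__':
    import argparse
    import logging

    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser()
    # Default is a dry run (nothing is posted to SHARE); pass --no-dry to actually migrate.
    parser.add_argument('--no-dry', dest='dry', action='store_false')
    args = parser.parse_args()
    migrate(dry=args.dry)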
def update_share_preprint_modified_dates(dry_run=False):
    for preprint in Preprint.objects.filter(date_modified__lt=F('node__modified')):
        if dry_run:
            logger.info('Would have sent ' + preprint._id + ' data to SHARE')
        else:
            on_preprint_updated(preprint._id)
            logger.info(preprint._id + ' data sent to SHARE')
Example #5
def migrate():
    assert settings.SHARE_URL, 'SHARE_URL must be set to migrate.'
    assert settings.SHARE_API_TOKEN, 'SHARE_API_TOKEN must be set to migrate.'
    targets = get_targets()
    target_count = len(targets)
    successes = []
    failures = []
    count = 0

    logger.info('Preparing to migrate {} preprints.'.format(target_count))
    for preprint_id in targets:
        count += 1
        logger.info('{}/{} - {}'.format(count, target_count, preprint_id))
        try:
            on_preprint_updated(preprint_id)
        except Exception as e:
            # TODO: This reliably fails for certain nodes with
            # IncompleteRead(0 bytes read)
            failures.append(preprint_id)
            logger.warn(
                'Encountered exception {} while posting to SHARE for preprint {}'
                .format(e, preprint_id))
        else:
            successes.append(preprint_id)

    logger.info('Successes: {}'.format(successes))
    logger.info('Failures: {}'.format(failures))
def update_share_preprint_modified_dates(dry_run=False):
    for preprint in PreprintService.objects.filter(
            date_modified__lt=F('node__date_modified')):
        if dry_run:
            logger.info('Would have sent ' + preprint._id + ' data to SHARE')
        else:
            on_preprint_updated(preprint._id)
            logger.info(preprint._id + ' data sent to SHARE')
Example #7
File: views.py Project: adlius/osf.io
    def delete(self, request, *args, **kwargs):
        preprint = self.get_object()
        if settings.SHARE_URL and settings.SHARE_API_TOKEN:
            on_preprint_updated(preprint._id)
            update_admin_log(
                user_id=self.request.user.id,
                object_id=preprint._id,
                object_repr='Preprint',
                message='Preprint Reindexed (SHARE): {}'.format(preprint._id),
                action_flag=REINDEX_SHARE
            )
        return redirect(reverse_preprint(self.kwargs.get('guid')))
Example #8
def update(dry_run=False):
    created_dict = {'osf': []}
    for hier in BEPRESS_CHANGES['create']:
        new_text = hier.split(':')[-1]
        bepress_parent = BEPRESS_PROVIDER.subjects.get(
            text=hier.split(':')[-2])
        logger.info('Creating osf - {}'.format(new_text))
        bepress_subject = Subject.objects.create(parent=bepress_parent,
                                                 provider=BEPRESS_PROVIDER,
                                                 text=new_text)
        created_dict['osf'].append(new_text)
        for custom_parent in bepress_parent.aliases.all():
            if not bepress_parent.children.count() > 1 or (
                    custom_parent.children.exists() and set(
                        bepress_parent.children.exclude(
                            text=new_text).values_list(
                                'text', flat=True)).issubset(
                                    set(
                                        custom_parent.children.values_list(
                                            'text', flat=True)))):
                # Either children were included in the custom taxonomy or they didn't exist before, probably
                logger.info('Creating {} - {}'.format(
                    custom_parent.provider._id, new_text))
                Subject.objects.create(parent=custom_parent,
                                       provider=custom_parent.provider,
                                       text=new_text,
                                       bepress_subject=bepress_subject)
                if custom_parent.provider._id not in created_dict:
                    created_dict[custom_parent.provider._id] = []
                created_dict[custom_parent.provider._id].append(new_text)
    for old, new in BEPRESS_CHANGES['rename'].items():
        logger.info('Renaming `{}`->`{}`'.format(old, new))
        to_update = Subject.objects.filter(text=old)
        affected_preprints = set(
            to_update.exclude(preprint_services__isnull=True).values_list(
                'preprint_services__guids___id', flat=True))
        to_update.update(text=new)
        for preprint_id in affected_preprints:
            logger.info(
                'Notifying SHARE about preprint {} change'.format(preprint_id))
            if not dry_run:
                on_preprint_updated(preprint_id)
    for provider_id, list_of_subjs in created_dict.items():
        logger.info('Created {} new subjects on {}: "{}"'.format(
            len(list_of_subjs), provider_id, ', '.join(list_of_subjs)))
Example #9
def map_preprints_to_custom_subjects(custom_provider,
                                     merge_dict,
                                     dry_run=False):
    for preprint in PreprintService.objects.filter(provider=custom_provider):
        logger.info('Preparing to migrate preprint {}'.format(preprint.id))
        old_hier = preprint.subject_hierarchy
        subjects_to_map = [hier[-1] for hier in old_hier]
        merged_subject_ids = set(
            Subject.objects.filter(provider=custom_provider,
                                   text__in=[
                                       merge_dict[k]
                                       for k in set(merge_dict.keys())
                                       & set([s.text for s in subjects_to_map])
                                   ]).values_list('id', flat=True))
        subject_ids_to_map = set(s.id for s in subjects_to_map
                                 if s.text not in merge_dict.keys())
        aliased_subject_ids = set(
            Subject.objects.filter(bepress_subject__id__in=subject_ids_to_map,
                                   provider=custom_provider).values_list(
                                       'id', flat=True)) | merged_subject_ids
        aliased_hiers = [
            s.object_hierarchy
            for s in Subject.objects.filter(id__in=aliased_subject_ids)
        ]
        old_subjects = list(preprint.subjects.values_list('id', flat=True))
        preprint.subjects.clear()

        for hier in aliased_hiers:
            validate_subject_hierarchy([s._id for s in hier])
            for s in hier:
                preprint.subjects.add(s)

        # Update preprint in SHARE
        if not dry_run:
            on_preprint_updated(preprint._id,
                                old_subjects=old_subjects,
                                update_share=True)
        preprint.reload()
        new_hier = [
            s.object_hierarchy for s in preprint.subjects.exclude(
                children__in=preprint.subjects.all())
        ]
        logger.info(
            'Successfully migrated preprint {}.\n\tOld hierarchy:{}\n\tNew hierarchy:{}'
            .format(preprint.id, old_hier, new_hier))
Example #10
def map_preprints_to_custom_subjects(custom_provider, merge_dict, dry_run=False):
    for preprint in PreprintService.objects.filter(provider=custom_provider):
        logger.info('Preparing to migrate preprint {}'.format(preprint.id))
        old_hier = preprint.subject_hierarchy
        subjects_to_map = [hier[-1] for hier in old_hier]
        merged_subject_ids = set(Subject.objects.filter(provider=custom_provider, text__in=[merge_dict[k] for k in set(merge_dict.keys()) & set([s.text for s in subjects_to_map])]).values_list('id', flat=True))
        subject_ids_to_map = set(s.id for s in subjects_to_map if s.text not in merge_dict.keys())
        aliased_subject_ids = set(Subject.objects.filter(bepress_subject__id__in=subject_ids_to_map, provider=custom_provider).values_list('id', flat=True)) | merged_subject_ids
        aliased_hiers = [s.object_hierarchy for s in Subject.objects.filter(id__in=aliased_subject_ids)]
        old_subjects = list(preprint.subjects.values_list('id', flat=True))
        preprint.subjects.clear()
        for hier in aliased_hiers:
            validate_subject_hierarchy([s._id for s in hier])
            for s in hier:
                preprint.subjects.add(s)
        # Update preprint in SHARE
        if not dry_run:
            on_preprint_updated(preprint._id, old_subjects=old_subjects, update_share=True)
        preprint.reload()
        new_hier = [s.object_hierarchy for s in preprint.subjects.exclude(children__in=preprint.subjects.all())]
        logger.info('Successfully migrated preprint {}.\n\tOld hierarchy:{}\n\tNew hierarchy:{}'.format(preprint.id, old_hier, new_hier))
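As used above, merge_dict appears to map an old subject's text to the text of the subject it was merged into on the custom provider. A hypothetical dry-run invocation; the provider id and subject names are placeholders:

custom_provider = PreprintProvider.objects.get(_id='example-provider')  # hypothetical provider id
merge_dict = {
    # old subject text -> text of the merged subject on the custom provider
    'Hypothetical Old Subject': 'Hypothetical Merged Subject',
}
map_preprints_to_custom_subjects(custom_provider, merge_dict, dry_run=True)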
Example #11
    def test_send_to_share_is_true(self, mock_requests):
        self.preprint.provider.access_token = 'Snowmobiling'
        self.preprint.provider.save()
        on_preprint_updated(self.preprint._id)

        assert mock_requests.post.called