def create(self, validated_data):
    inst = self.context['view'].get_object()['self']
    user = self.context['request'].user
    registration_dicts = validated_data['data']
    changes_flag = False
    for registration_dict in registration_dicts:
        registration = Registration.load(registration_dict['_id'])
        if not registration:
            raise exceptions.NotFound(
                detail='Registration with id "{}" was not found'.format(registration_dict['_id']),
            )
        if not registration.has_permission(user, osf_permissions.WRITE):
            raise exceptions.PermissionDenied(
                detail='Write permission on registration {} required'.format(registration_dict['_id']),
            )
        if not registration.is_affiliated_with_institution(inst):
            registration.add_affiliated_institution(inst, user, save=True)
            changes_flag = True
    if not changes_flag:
        raise RelationshipPostMakesNoChanges
    return {
        'data': list(inst.nodes.filter(is_deleted=False, type='osf.registration')),
        'self': inst,
    }
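For context, a sketch of the relationship payload this create() method consumes. The JSON-API shape and the registration ids below are illustrative assumptions; the serializer is assumed to map each resource identifier's 'id' onto the '_id' key read above.

# Hypothetical POST body for the institution <-> registrations relationship
# endpoint backed by create(); ids and resource type are placeholders.
payload = {
    'data': [
        {'type': 'registrations', 'id': 'abcd3'},
        {'type': 'registrations', 'id': 'efgh5'},
    ],
}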
def check_registrations(registration_ids):
    for r_id in registration_ids:
        reg = Registration.load(r_id)
        if not reg:
            logger.warn('Registration {} not found'.format(r_id))
        else:
            check(reg)
def sync_registration_creator_bibliographic_status(registration_guid):
    registration = Registration.load(registration_guid)
    creator = registration.creator
    creator_contributor_reg = registration.contributor_set.get(user=creator)
    creator_contributor_node = registration.registered_from.contributor_set.get(user=creator)
    creator_contributor_reg.visible = creator_contributor_node.visible
    creator_contributor_reg.save()
def verify_registrations(registration_ids):
    for r_id in registration_ids:
        reg = Registration.load(r_id)
        if not reg:
            logger.warn('Registration {} not found'.format(r_id))
        else:
            if verify(reg):
                VERIFIED.append(reg)
            else:
                SKIPPED.append(reg)
def test_sync_different_registration_creator_bibliographic_status(
        self, user, project, registration, project_contributor, registration_creator_contrib):
    # Assert out-of-sync bibliographic status
    assert project_contributor.visible is False
    assert registration_creator_contrib.visible is True

    registration_guid = registration._id
    sync_registration_creator_bibliographic_status(registration_guid)

    updated_registration = Registration.load(registration_guid)
    updated_registration_creator_contrib = updated_registration.contributor_set.get(
        user=registration.creator)
    assert updated_registration_creator_contrib.visible is False
    assert updated_registration_creator_contrib.visible == project_contributor.visible
def perform_destroy(self, instance):
    data = self.request.data['data']
    user = self.request.user
    ids = [datum['id'] for datum in data]
    registrations = []
    # Validate write permission on every registration before removing any affiliations
    for id_ in ids:
        registration = Registration.load(id_)
        if not registration.has_permission(user, osf_permissions.WRITE):
            raise exceptions.PermissionDenied(detail='Write permission on registration {} required'.format(id_))
        registrations.append(registration)

    for registration in registrations:
        registration.remove_affiliated_institution(inst=instance['self'], user=user)
        registration.save()
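Similarly, a sketch of the DELETE body perform_destroy() reads from self.request.data; unlike create(), the 'id' values are consumed directly rather than remapped to '_id'. The ids and resource type are illustrative assumptions.

# Hypothetical DELETE body for the same relationship endpoint; ids are placeholders.
payload = {
    'data': [
        {'type': 'registrations', 'id': 'abcd3'},
    ],
}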
def test_sync_same_registration_creator_bibliographic_status(
        self, user, project, registration, registration_creator_contrib):
    # Assert aligned bibliographic status
    project_contributor = registration.registered_from.contributor_set.get(user=user)
    assert project_contributor.visible is True
    assert registration_creator_contrib.visible is True

    registration_guid = registration._id
    sync_registration_creator_bibliographic_status(registration_guid)

    updated_registration = Registration.load(registration_guid)
    updated_registration_creator_contrib = updated_registration.contributor_set.get(
        user=registration.creator)
    assert updated_registration_creator_contrib.visible is True
    assert updated_registration_creator_contrib.visible == project_contributor.visible
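One plausible set of pytest fixtures the two tests above assume, sketched with the factories from osf_tests.factories. The fixture bodies are assumptions, not the original test module; in particular, forcing the project creator non-bibliographic by saving the Contributor row directly bypasses the usual visibility validation.

import pytest
from osf_tests.factories import AuthUserFactory, ProjectFactory, RegistrationFactory


@pytest.fixture()
def user():
    return AuthUserFactory()


@pytest.fixture()
def project(user):
    return ProjectFactory(creator=user)


@pytest.fixture()
def registration(user, project):
    return RegistrationFactory(project=project, creator=user)


@pytest.fixture()
def project_contributor(user, project):
    # Flip the project creator to non-bibliographic after the registration exists,
    # writing the Contributor row directly to bypass visibility checks (assumption).
    contrib = project.contributor_set.get(user=user)
    contrib.visible = False
    contrib.save()
    return contrib


@pytest.fixture()
def registration_creator_contrib(user, registration):
    # The creator's contributor record on the registration, bibliographic by default.
    return registration.contributor_set.get(user=user)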
def main():
    args = parse_args()
    dry = args.dry_run
    if not dry:
        # If we're not running in dry mode log everything to a file
        script_utils.add_file_logger(logger, __file__)
    else:
        logger.info('Running in dry mode...')

    checked_ok, checked_stuck = [], []
    force_failed = []
    for reg_id in args.registration_ids:
        logger.info('Processing registration {}'.format(reg_id))
        reg = Registration.load(reg_id)
        is_stuck = not check_registration(reg)
        if args.check:
            if is_stuck:
                checked_stuck.append(reg)
            else:
                checked_ok.append(reg)
        else:
            if not is_stuck:
                logger.info('Registration {} is not stuck, skipping...'.format(reg))
                continue
            logger.warn('Failing registration {}'.format(reg_id))
            if not dry:
                with transaction.atomic():
                    archive_job = reg.archive_job
                    archive_job.sent = False
                    archive_job.save()
                    reg.archive_status = ARCHIVER_FORCED_FAILURE
                    archive_fail(reg, errors=reg.archive_job.target_info())
            force_failed.append(reg)

    if checked_ok:
        logger.info('{} registrations not stuck: {}'.format(len(checked_ok), [e._id for e in checked_ok]))
    if checked_stuck:
        logger.warn('{} registrations stuck: {}'.format(len(checked_stuck), [e._id for e in checked_stuck]))
    if force_failed:
        logger.info('Force-failed {} registrations: {}'.format(len(force_failed), [e._id for e in force_failed]))
    print('Done.')
def main():
    args = parse_args()
    dry = args.dry_run
    if not dry:
        # If we're not running in dry mode log everything to a file
        script_utils.add_file_logger(logger, __file__)
    else:
        logger.info('Running in dry mode...')

    checked_ok, checked_stuck = [], []
    verified, skipped = [], []
    for reg_id in args.registration_ids:
        reg = Registration.load(reg_id)
        if args.check:
            not_stuck = check_registration(reg)
            if not_stuck:
                checked_ok.append(reg)
            else:
                checked_stuck.append(reg)
        else:
            try:
                logger.info('Verifying {}'.format(reg._id))
                verify(reg)
            except VerificationError as err:
                logger.error('Skipping {} due to error...'.format(reg._id))
                logger.error(err.args[0])
                skipped.append(reg)
            else:
                verified.append(reg)
                if not dry:
                    logger.info('Force-archiving {}'.format(reg_id))
                    force_archive(reg)

    if checked_ok:
        logger.info('{} registrations not stuck: {}'.format(len(checked_ok), [e._id for e in checked_ok]))
    if checked_stuck:
        logger.warn('{} registrations stuck: {}'.format(len(checked_stuck), [e._id for e in checked_stuck]))
    if verified:
        logger.info('{} registrations {}: {}'.format(
            len(verified),
            'archived' if not dry else 'verified',
            [e._id for e in verified],
        ))
    if skipped:
        logger.error('{} registrations skipped: {}'.format(len(skipped), [e._id for e in skipped]))
    print('Done.')
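Both main() entry points above read args.dry_run, args.check, and args.registration_ids. A minimal sketch of the parse_args() helper they assume; the flag spellings and help text are illustrative, not the original script's.

import argparse


def parse_args():
    parser = argparse.ArgumentParser(description='Check, force-fail, or force-archive stuck registrations')
    parser.add_argument('--dry', action='store_true', dest='dry_run', help='Log what would change without writing to the database')
    parser.add_argument('--check', action='store_true', dest='check', help='Only report whether each registration is stuck')
    parser.add_argument('registration_ids', type=str, nargs='+', help='GUIDs of the registrations to process')
    return parser.parse_args()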
def tarchive(reg_id):
    start_time = timezone.now()
    dst = Registration.load(reg_id)
    if not dst or not dst.archiving:
        raise Exception('Invalid registration _id')
    assert verify(dst), 'Unable to verify registration'
    target = dst.archive_job.get_target('github')
    if not target or target.done:
        raise Exception('Invalid archive job target')

    # Pull the source node's GitHub repo as a tarball
    src = dst.registered_from
    ghns = src.get_addon('github')
    cli = github3.login(token=ghns.external_account.oauth_key)
    cli.set_client_id(github_settings.CLIENT_ID, github_settings.CLIENT_SECRET)
    repo = cli.repository(ghns.user, ghns.repo)
    logger.info('Downloading tarball of repository...')
    assert repo.archive('tarball', TAR_PATH)
    logger.info('Download complete.')

    with tarfile.open(TAR_PATH) as tf:
        logger.info('Extracting tarball to {} ...'.format(EXTRACTED_PATH))
        tf.extractall(EXTRACTED_PATH)
        logger.info('Extraction complete.')

    logger.info('Preparing node for upload...')
    # Drop any previous (non-trashed) copy of the archive folder on the registration.
    # node_settings, verify, recursive_upload, and complete_archive_target are defined
    # elsewhere in this script.
    if dst.files.exclude(type='osf.trashedfolder').filter(
            name=node_settings.archive_folder_name.replace('/', '-')).exists():
        dst.files.exclude(type='osf.trashedfolder').get(
            name=node_settings.archive_folder_name.replace('/', '-')).delete()

    logger.info('Preparing to upload...')
    dst_osfs = dst.get_addon('osfstorage')
    recursive_upload(dst, EXTRACTED_PATH, dst_osfs.get_root(), name=dst_osfs.archive_folder_name)
    logger.info('Archive upload complete\nMarking target as archived...')
    complete_archive_target(dst, 'github')

    # Hide logs generated by the manual archive so the registration's log feed stays clean
    if dst.logs.filter(date__gte=start_time).exists():
        logger.info('Cleaning up logs...')
        dst.logs.filter(date__gte=start_time).update(should_hide=True)
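A sketch of the module-level setup tarchive() leans on. The temp-file paths are placeholders and the import locations are assumptions based on the names used above; helpers such as verify, recursive_upload, complete_archive_target, and node_settings are defined elsewhere in the script and are not sketched here.

import logging
import tarfile

import github3
from django.utils import timezone

from addons.github import settings as github_settings  # assumed import path
from osf.models import Registration

logger = logging.getLogger(__name__)

# Placeholder scratch locations for the downloaded tarball and its extraction
TAR_PATH = '/tmp/stuck-registration.tar.gz'
EXTRACTED_PATH = '/tmp/stuck-registration-extracted'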