def update_file_guid_referent(self, node, event_type, payload, user=None):
    """Repoint file GUIDs after a file move/rename so they resolve to the file's new home.

    Only 'addon_file_moved' / 'addon_file_renamed' events are handled; anything
    else is a no-op. For providers addressed by ID (settings.ADDONS_BASED_ON_IDS)
    a rename, or a move within the same node+provider, needs no repointing.

    :param node: unused here; presumably kept for the caller's signal signature — TODO confirm
    :param payload: waterbutler-style dict with 'source' and 'destination' entries
    """
    if event_type not in ('addon_file_moved', 'addon_file_renamed'):
        return  # Nothing to do
    source, destination = payload['source'], payload['destination']
    source_node, destination_node = Node.load(source['node']['_id']), Node.load(destination['node']['_id'])
    if source['provider'] in settings.ADDONS_BASED_ON_IDS:
        if event_type == 'addon_file_renamed':
            return  # Node has not changed and provider has not changed
        # Must be a move
        if source['provider'] == destination['provider'] and source_node == destination_node:
            return  # Node has not changed and provider has not changed
    # osfstorage is addressed by path; other providers by materialized path.
    file_guids = BaseFileNode.resolve_class(source['provider'], BaseFileNode.ANY).get_file_guids(
        materialized_path=source['materialized'] if source['provider'] != 'osfstorage' else source['path'],
        provider=source['provider'],
        node=source_node
    )
    for guid in file_guids:
        obj = Guid.load(guid)
        # Comments rooted on this file must follow it to the destination node.
        if source_node != destination_node and Comment.find(Q('root_target._id', 'eq', guid)).count() != 0:
            update_comment_node(guid, source_node, destination_node)
        # Cross-provider moves (or any non-osfstorage move) get a brand-new
        # file record; the old one is trashed unless it already was.
        if source['provider'] != destination['provider'] or source['provider'] != 'osfstorage':
            old_file = BaseFileNode.load(obj.referent._id)
            obj.referent = create_new_file(obj, source, destination, destination_node)
            obj.save()
            if old_file and not TrashedFileNode.load(old_file._id):
                old_file.delete()
def get_metadata_files(draft):
    """Yield the BaseFileNode for every file attached to a prereg draft's metadata.

    Walks each file question of the prereg-prize schema. A question's files live
    either directly in its 'extra' list or nested under value['uploader']['extra'];
    both layouts are processed identically by ``_files_from_extra``. Entries with
    no recorded 'fileId' are backfilled (a GUID is created) and the draft saved.

    :param draft: a DraftRegistration whose registration_metadata is inspected
    :raises Http404: if a file is not in OSFStorage or cannot be loaded
    """
    data = draft.registration_metadata
    for q, question in get_file_questions('prereg-prize.json'):
        if not isinstance(data[q]['value'], dict):
            # Flat layout: files hang directly off the question's 'extra' list.
            for item in _files_from_extra(draft, data, q, question, data[q]['extra']):
                yield item
            continue
        # Nested layout: files live under the question's uploader widget.
        for item in _files_from_extra(draft, data, q, question, data[q]['value']['uploader']['extra']):
            yield item


def _files_from_extra(draft, data, q, question, extra):
    """Yield file nodes for one question's 'extra' list, backfilling missing fileIds.

    ``extra`` is a sub-list of ``data`` (shared by reference), so assigning
    ``extra[i]['fileId']`` updates the draft metadata before it is re-saved.
    """
    for i, file_info in enumerate(extra):
        provider = file_info['data']['provider']
        if provider != 'osfstorage':
            raise Http404(
                'File does not exist in OSFStorage ({}: {})'.format(
                    q, question
                ))
        file_guid = file_info.get('fileId')
        if not file_guid:
            # No GUID recorded yet: resolve (or create) the file node,
            # mint a GUID, and persist it back into the draft metadata.
            node = Node.load(file_info.get('nodeId'))
            path = file_info['data'].get('path')
            item = BaseFileNode.resolve_class(
                provider, BaseFileNode.FILE
            ).get_or_create(node, path)
            file_guid = item.get_guid(create=True)._id
            extra[i]['fileId'] = file_guid
            draft.update_metadata(data)
            draft.save()
        else:
            # Path here is the file's guid-like id with slashes stripped.
            item = BaseFileNode.load(file_info['data']['path'].replace('/', ''))
            if item is None:
                raise Http404(
                    'File with guid "{}" in "{}" does not exist'.format(
                        file_guid, question
                    ))
        yield item
def create_node_from_project_json(egap_assets_path, egap_project_dir, creator):
    """Create an OSF node from an EGAP project.json and attach its contributors.

    Contributors are added with write permission and no notification email;
    the creator is then hidden from the contributor list. Returns the new node.
    """
    project_file = os.path.join(egap_assets_path, egap_project_dir, 'project.json')
    with open(project_file, 'r') as fp:
        project_data = json.load(fp)

    node = Node(title=project_data['title'], creator=creator)
    node.save()  # must save before adding contribs for auth reasons

    for contributor in project_data['contributors']:
        email = ''
        if contributor.get('email'):
            # Strip trailing junk (escaped nbsp, extra addresses) and unwrap
            # "Name <addr>" style entries down to the bare address.
            email = contributor.get('email').strip()
            email = email.split('\\u00a0')[0].split(',')[0]
            if '<' in email:
                email = email.split('<')[1].replace('>', '')
        node.add_contributor_registered_or_not(
            Auth(creator),
            full_name=contributor['name'],
            email=email,
            permissions=WRITE,
            send_email='false')

    node.set_visible(creator, visible=False, log=False, save=True)
    return node
def get_egap_assets(guid, creator_auth):
    """Download and unpack the EGAP assets zip attached to node ``guid``.

    Fetches the node's first file via WaterButler, extracts it into a fresh
    temp directory, and flattens a single wrapper directory if the zip has one.

    :param guid: guid of the node holding the assets zip
    :param creator_auth: auth headers dict passed through to WaterButler
    :return: path of the temp directory; caller is responsible for cleanup
    """
    node = Node.load(guid)
    zip_file = node.files.first()
    temp_path = tempfile.mkdtemp()

    url = '{}/v1/resources/{}/providers/osfstorage/{}'.format(
        WATERBUTLER_INTERNAL_URL,
        guid,
        zip_file._id)
    zip_file = requests.get(url, headers=creator_auth).content
    egap_assets_path = os.path.join(temp_path, 'egap_assets.zip')

    # BUGFIX: ``requests`` returns bytes; writing them through a text-mode
    # handle raises TypeError on Python 3 — open the archive in binary mode.
    with open(egap_assets_path, 'wb') as fp:
        fp.write(zip_file)

    with ZipFile(egap_assets_path, 'r') as zipObj:
        zipObj.extractall(temp_path)

    # If the zip wrapped everything in one parent directory, hoist its
    # contents up into temp_path and drop the (now empty) wrapper.
    zip_parent = [
        file for file in os.listdir(temp_path)
        if file not in ('__MACOSX', 'egap_assets.zip') and not check_id(file)
    ]
    if zip_parent:
        zip_parent = zip_parent[0]
        for i in os.listdir(os.path.join(temp_path, zip_parent)):
            shutil.move(os.path.join(temp_path, zip_parent, i), os.path.join(temp_path, i))
    if zip_parent:
        os.rmdir(os.path.join(temp_path, zip_parent))

    return temp_path
def obj_gen(targets):
    """Yield ``(user, public_nodes, private_nodes)`` for each target user.

    ``targets`` maps user id -> {'private': [node ids], 'public': [node ids]}.
    Nodes listed as public that have since gone private are shifted into the
    private list; the reverse move is deliberately not made. Users that raise
    are logged and skipped.
    """
    for u_id, n_dict in targets.items():
        try:
            u = OSFUser.load(u_id)
            priv = [
                n for n in [Node.load(n_id) for n_id in n_dict.get('private', [])]
                if not n.is_public
            ]
            pub = []
            for n_id in n_dict.get('public', []):
                # Add previously-public nodes to private list, as 50>5.
                # Do not do the reverse.
                n = Node.load(n_id)
                if n.is_public:
                    pub.append(n)
                else:
                    priv.append(n)
            yield u, pub, priv
        except Exception:
            # BUGFIX: logger.error dropped the traceback; logger.exception
            # records it so the failure can actually be diagnosed.
            logger.exception(f'Unknown exception handling {u_id}, skipping')
def perform_destroy(self, instance):
    """Remove this institution's affiliation from every node in the payload.

    All nodes are validated (existence and write permission) before any is
    mutated, so a failure part-way through leaves nothing half-applied.

    :raises NotFound: if a node id does not resolve (matches the create() path)
    :raises PermissionDenied: if the user lacks write access on any node
    """
    data = self.request.data['data']
    user = self.request.user
    ids = [datum['id'] for datum in data]
    nodes = []
    for id_ in ids:
        node = Node.load(id_)
        # BUGFIX: a bad id made Node.load return None and the permission
        # check below crashed with AttributeError; raise 404 instead,
        # consistent with the corresponding create serializer.
        if not node:
            raise exceptions.NotFound(detail='Node with id "{}" was not found'.format(id_))
        if not node.has_permission(user, osf_permissions.WRITE):
            raise exceptions.PermissionDenied(detail='Write permission on node {} required'.format(id_))
        nodes.append(node)

    for node in nodes:
        node.remove_affiliated_institution(inst=instance['self'], user=user)
        node.save()
def update_node_links(designated_node, target_node_ids, description):
    """ Takes designated node, removes current node links and replaces them with node links to target nodes """
    logger.info('Repopulating {} with latest {} nodes.'.format(designated_node._id, description))
    user = designated_node.creator
    auth = Auth(user)

    # BUGFIX: rm_pointer removes entries from the pointer collection being
    # iterated; iterate a snapshot so removal during iteration cannot skip
    # every other pointer.
    for pointer in list(designated_node.nodes_pointer):
        designated_node.rm_pointer(pointer, auth)

    for n_id in target_node_ids:
        n = Node.load(n_id)
        # Only link nodes that pass the eligibility filter (defined elsewhere).
        if is_eligible_node(n):
            designated_node.add_pointer(n, auth, save=True)
            logger.info('Added node link {} to {}'.format(n, designated_node))
def get_public_projects(uid=None, user=None):
    """Return summary dicts for every public top-level project of a user.

    Accepts either an already-loaded ``user`` or a ``uid`` to look one up.
    """
    user = user or OSFUser.load(uid)
    # In future redesign, should be limited for users with many projects / components
    root_ids = (
        Node.find_for_user(user, PROJECT_QUERY)
        .filter(is_public=True)
        .get_roots()
        .values_list('id', flat=True)
    )
    queryset = (
        Node.objects.filter(id__in=set(root_ids))
        # Defer some fields that we don't use for rendering node lists
        .defer('child_node_subscriptions', 'date_created', 'deleted_date',
               'description', 'file_guid_to_share_uuids')
        .include('guids', 'contributor__user__guids', '_parents__parent__guids')
        .order_by('-date_modified')
    )
    return [
        serialize_node_summary(node=node, auth=Auth(user), show_path=False)
        for node in queryset
    ]
def main(dry_run=True):
    """Approve every pending retraction past its window and retract the registration.

    :param dry_run: when True, only log what would happen; nothing is committed.
    """
    pending_retractions = Retraction.find(
        Q('state', 'eq', Retraction.UNAPPROVED))
    for retraction in pending_retractions:
        if should_be_retracted(retraction):
            if dry_run:
                logger.warn('Dry run mode')
            try:
                parent_registration = Node.find_one(
                    Q('retraction', 'eq', retraction))
            except Exception:
                # BUGFIX: the original bound the exception but never logged it,
                # and called 'Skipping...'.format(retraction) — a no-op format.
                # logger.exception keeps the traceback; the messages are unchanged.
                logger.exception(
                    'Could not find registration associated with retraction {}'
                    .format(retraction))
                logger.error('Skipping...')
                continue
            logger.warn(
                'Retraction {0} approved. Retracting registration {1}'.format(
                    retraction._id, parent_registration._id))
            if not dry_run:
                with transaction.atomic():
                    retraction.state = Retraction.APPROVED
                    try:
                        parent_registration.registered_from.add_log(
                            action=NodeLog.RETRACTION_APPROVED,
                            params={
                                'node': parent_registration.registered_from._id,
                                'registration': parent_registration._id,
                                'retraction_id': parent_registration.retraction._id,
                            },
                            auth=Auth(
                                parent_registration.retraction.initiated_by),
                        )
                        retraction.save()
                        # Re-index the registration and its whole subtree.
                        parent_registration.update_search()
                        for node in parent_registration.get_descendants_recursive():
                            node.update_search()
                    except Exception as err:
                        logger.error('Unexpected error raised when retracting '
                                     'registration {}. Continuing...'.format(
                                         parent_registration))
                        logger.exception(err)
def main(dry_run=True):
    """Refresh the node links on the 'new and noteworthy' links node."""
    init_app(routes=False)
    links_node = Node.load(NEW_AND_NOTEWORTHY_LINKS_NODE)
    noteworthy_ids = get_new_and_noteworthy_nodes(links_node)
    update_node_links(links_node, noteworthy_ids, 'new and noteworthy')

    try:
        links_node.save()
    except (KeyError, RuntimeError) as error:
        logger.error('Could not migrate new and noteworthy nodes due to error')
        logger.exception(error)
    else:
        logger.info('Node links on {} updated.'.format(links_node._id))

    if dry_run:
        raise RuntimeError('Dry run -- transaction rolled back.')
def get_egap_assets(guid, creator_auth):
    """Download the EGAP assets zip from node ``guid`` and extract it.

    :param guid: guid of the node holding the assets zip
    :param creator_auth: auth headers dict passed through to WaterButler
    :return: path of a fresh temp directory with the extracted contents;
        the caller is responsible for cleanup
    """
    node = Node.load(guid)
    zip_file = node.files.first()
    temp_path = tempfile.mkdtemp()

    url = '{}/v1/resources/{}/providers/osfstorage/{}'.format(
        WATERBUTLER_INTERNAL_URL,
        guid,
        zip_file._id)
    zip_file = requests.get(url, headers=creator_auth).content
    egap_assets_path = os.path.join(temp_path, 'egap_assets.zip')

    # BUGFIX: the response body is bytes; text mode ('w') raises TypeError
    # on Python 3 — write the archive in binary mode.
    with open(egap_assets_path, 'wb') as fp:
        fp.write(zip_file)

    with ZipFile(egap_assets_path, 'r') as zipObj:
        zipObj.extractall(temp_path)

    return temp_path
def test_detail_view_returns_editable_fields(self, app, user, draft_registration, url_draft_registrations, project_public):
    """Draft detail should expose the branched-from project's editable fields
    and the expected relationships."""
    res = app.get(url_draft_registrations, auth=user.auth, expect_errors=True)
    attributes = res.json['data']['attributes']

    assert attributes['title'] == project_public.title
    assert attributes['description'] == project_public.description
    assert attributes['category'] == project_public.category
    # BUGFIX: this line was a bare comparison expression with no `assert`,
    # so the self-link was never actually checked.
    assert res.json['data']['links']['self'] == url_draft_registrations

    relationships = res.json['data']['relationships']
    assert Node.load(relationships['branched_from']['data']['id']) == draft_registration.branched_from
    assert 'affiliated_institutions' in relationships
    assert 'subjects' in relationships
    assert 'contributors' in relationships
def remove_logs_and_files(node_guid):
    """Prune a node tree: for the node and every primary descendant, delete all
    but the newest version of each file, purge trashed file nodes, and delete
    all but the earliest log entry.

    NOTE(review): destructive and irreversible — run under whatever
    transaction/backup discipline the calling script provides.
    """
    assert node_guid, 'Expected truthy node_id, got {}'.format(node_guid)
    node = Node.load(node_guid)
    assert node, 'Unable to find node with guid {}'.format(node_guid)
    for n in node.node_and_primary_descendants():
        logger.info('{} - Deleting file versions...'.format(n._id))
        # Keep only the most recent version of each file on this node.
        for file in n.files.exclude(parent__isnull=True):
            try:
                file.versions.exclude(
                    id=file.versions.latest('date_created').id).delete()
            except file.versions.model.DoesNotExist:
                # No FileVersions, skip
                pass
        logger.info('{} - Deleting trashed file nodes...'.format(n._id))
        # Hard-delete everything already in the trash for this node.
        BaseFileNode.objects.filter(
            type__in=TrashedFileNode._typedmodels_subtypes,
            node=n).delete()
        logger.info('{} - Deleting logs...'.format(n._id))
        # Keep only the node's very first log entry (its creation record).
        n.logs.exclude(id=n.logs.earliest().id).delete()
def get_public_projects(uid=None, user=None):
    """Serialize the public top-level projects belonging to a user.

    Either a loaded ``user`` or a ``uid`` to load one from may be supplied.
    """
    user = user or User.load(uid)
    # In future redesign, should be limited for users with many projects / components
    public_root_ids = set(
        Node.find_for_user(user, PROJECT_QUERY)
        .filter(is_public=True)
        .get_roots()
        .values_list('id', flat=True)
    )
    nodes = (
        Node.objects.filter(id__in=public_root_ids)
        # Defer some fields that we don't use for rendering node lists
        .defer('child_node_subscriptions', 'date_created', 'deleted_date',
               'description', 'file_guid_to_share_uuids')
        .include('guids', 'contributor__user__guids', '_parents__parent__guids')
        .order_by('-date_modified')
    )
    return [
        serialize_node_summary(node=node, auth=Auth(user), show_path=False)
        for node in nodes
    ]
def create(self, validated_data):
    """Affiliate each posted node with this institution.

    :raises NotFound: for node ids that do not resolve
    :raises PermissionDenied: when the user lacks write access on a node
    :raises RelationshipPostMakesNoChanges: when every node was already affiliated
    """
    inst = self.context['view'].get_object()['self']
    user = self.context['request'].user

    made_changes = False
    for node_dict in validated_data['data']:
        node_id = node_dict['_id']
        node = Node.load(node_id)
        if not node:
            raise exceptions.NotFound(detail='Node with id "{}" was not found'.format(node_id))
        if not node.has_permission(user, osf_permissions.WRITE):
            raise exceptions.PermissionDenied(detail='Write permission on node {} required'.format(node_id))
        if not node.is_affiliated_with_institution(inst):
            node.add_affiliated_institution(inst, user, save=True)
            made_changes = True

    if not made_changes:
        raise RelationshipPostMakesNoChanges

    return {
        'data': list(inst.nodes.filter(is_deleted=False, type='osf.node')),
        'self': inst,
    }
def get_object(self, node_id):
    """Resolve ``node_id`` via Node.load and return the result as-is."""
    node = Node.load(node_id)
    return node
def serialize_user(user, node=None, admin=False, full=False, is_profile=False, include_node_counts=False):
    """
    Return a dictionary representation of a registered user.

    :param User user: A User object
    :param bool full: Include complete user properties
    """
    from website.project.utils import PROJECT_QUERY
    # A Contributor wrapper may be passed in place of a User; unwrap it but
    # remember it so contributor-specific flags (visibility) can be read.
    contrib = None
    if isinstance(user, Contributor):
        contrib = user
        user = contrib.user
    fullname = user.display_full_name(node=node)
    ret = {
        'id': str(user._primary_key),
        'registered': user.is_registered,
        'surname': user.family_name,
        'fullname': fullname,
        # Long names are elided to head...tail to keep list rendering compact.
        'shortname': fullname if len(fullname) < 50 else fullname[:23] + '...' + fullname[-23:],
        'gravatar_url': gravatar(
            user, use_ssl=True,
            size=settings.PROFILE_IMAGE_MEDIUM
        ),
        'active': user.is_active,
    }
    if node is not None:
        # Node-scoped flags: admins see a fixed read-only view; otherwise
        # visibility/permission come from the contributor record or the node.
        if admin:
            flags = {
                'visible': False,
                'permission': 'read',
            }
        else:
            flags = {
                'visible': contrib.visible if isinstance(contrib, Contributor) else node.contributor_set.filter(user=user, visible=True).exists(),
                'permission': reduce_permissions(node.get_permissions(user)),
            }
        ret.update(flags)
    if user.is_registered:
        ret.update({
            'url': user.url,
            'absolute_url': user.absolute_url,
            'display_absolute_url': user.display_absolute_url,
            'date_registered': user.date_registered.strftime('%Y-%m-%d'),
        })
    if full:
        # Add emails
        if is_profile:
            # Confirmed and unconfirmed addresses, each marked primary when it
            # matches the account username (case-insensitive).
            ret['emails'] = [
                {
                    'address': each,
                    'primary': each.strip().lower() == user.username.strip().lower(),
                    'confirmed': True,
                } for each in user.emails.values_list('address', flat=True)
            ] + [
                {
                    'address': each,
                    'primary': each.strip().lower() == user.username.strip().lower(),
                    'confirmed': False
                } for each in user.get_unconfirmed_emails_exclude_external_identity()
            ]
        if user.is_merged:
            merger = user.merged_by
            merged_by = {
                'id': str(merger._primary_key),
                'url': merger.url,
                'absolute_url': merger.absolute_url
            }
        else:
            merged_by = None
        projects = Node.find_for_user(user, PROJECT_QUERY).get_roots()
        ret.update({
            'activity_points': user.get_activity_points(),
            'gravatar_url': gravatar(
                user,
                use_ssl=True,
                size=settings.PROFILE_IMAGE_LARGE
            ),
            'is_merged': user.is_merged,
            'merged_by': merged_by,
        })
        # Project counts are queryset aggregations; only computed on request.
        if include_node_counts:
            ret.update({
                'number_projects': projects.count(),
                'number_public_projects': projects.filter(is_public=True).count(),
            })
    return ret
def get_default_queryset(self):
    """Return all public, non-deleted nodes."""
    not_deleted = Q('is_deleted', 'ne', True)
    is_public = Q('is_public', 'eq', True)
    return Node.find(not_deleted & is_public)
def get_object(self, node_id):
    """Load the node for ``node_id``, mapping a malformed id (AttributeError
    from Node.load) to a DRF ValidationError."""
    try:
        node = Node.load(node_id)
    except AttributeError:
        raise exceptions.ValidationError(detail='Node not correctly specified.')
    return node