def create(self, validated_data):
    """Create a new PreprintService for a node/provider pair.

    Pops ``node``, ``primary_file``, and ``provider`` from
    ``validated_data``, validates each, then delegates the remaining
    fields to ``self.update``.

    :param validated_data: serializer-validated request payload.
    :return: the newly created (and updated) PreprintService.
    :raises exceptions.NotFound: if the node id does not resolve.
    :raises exceptions.ValidationError: if the node is deleted or a
        required field (primary_file, provider) is missing.
    :raises exceptions.PermissionDenied: if the requester lacks ADMIN
        permission on the node.
    :raises Conflict: if a preprint already exists for this
        node/provider pair.
    """
    node = Node.load(validated_data.pop('node', None))
    if not node:
        raise exceptions.NotFound('Unable to find Node with specified id.')
    elif node.is_deleted:
        # Consistency fix: the other create() implementation in this file
        # rejects deleted nodes; without this guard a preprint could be
        # created from a deleted node here.
        raise exceptions.ValidationError('Cannot create a preprint from a deleted node.')

    auth = get_user_auth(self.context['request'])
    if not node.has_permission(auth.user, permissions.ADMIN):
        raise exceptions.PermissionDenied

    primary_file = validated_data.pop('primary_file', None)
    if not primary_file:
        raise exceptions.ValidationError(detail='You must specify a valid primary_file to create a preprint.')

    provider = validated_data.pop('provider', None)
    if not provider:
        raise exceptions.ValidationError(detail='You must specify a valid provider to create a preprint.')

    # Build the node/provider query once instead of twice.
    existing_query = Q('node', 'eq', node) & Q('provider', 'eq', provider)
    if PreprintService.find(existing_query).count():
        conflict = PreprintService.find_one(existing_query)
        raise Conflict('Only one preprint per provider can be submitted for a node. Check `meta[existing_resource_id]`.', meta={'existing_resource_id': conflict._id})

    preprint = PreprintService(node=node, provider=provider)
    self.set_field(preprint.set_primary_file, primary_file, auth, save=True)
    # Mark the node so incomplete preprint submissions can be detected.
    preprint.node._has_abandoned_preprint = True
    preprint.node.save()

    return self.update(preprint, validated_data)
def migrate(dry_run=True):
    """Backfill ``params.preprint`` on preprint-related NodeLogs.

    Finds PREPRINT_FILE_UPDATED / PREPRINT_INITIATED logs that are
    missing ``params.preprint`` and stamps each with the id of the
    preprint attached to the log's node.

    :param dry_run: when True (the default), report what would change
        without saving anything.
    """
    node_logs = list(NodeLog.find(
        Q('action', 'in', [NodeLog.PREPRINT_FILE_UPDATED, NodeLog.PREPRINT_INITIATED]) &
        Q('params.preprint', 'exists', False)
    ))
    logger.info('Preparing to migrate {} NodeLogs'.format(len(node_logs)))
    count = 0
    for log in node_logs:
        node_id = log.params.get('node')
        try:
            preprint = PreprintService.find_one(Q('node', 'eq', node_id))
        except NoResultsFound:
            # Orphaned log: its node never had a preprint.
            logger.error('Skipping {}, preprint not found for node: {}'.format(log._id, node_id))
            continue
        logger.info('Migrating log - {} - to add params.preprint: {}, '.format(log._id, preprint._id))
        log.params['preprint'] = preprint._id
        # BUG FIX: dry_run was previously ignored and log.save() ran
        # unconditionally, mutating the database even in a "dry" run.
        if not dry_run:
            log.save()
        count += 1
    logger.info('Migrated {} logs'.format(count))
def on_preprint_updated(preprint_id):
    # WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable
    # transactions are implemented in View and Task application layers.
    from website.models import PreprintService

    # Push a normalized-data payload describing this preprint to SHARE,
    # authenticating with the provider's own access token.
    preprint = PreprintService.load(preprint_id)
    if not settings.SHARE_URL:
        return

    token = preprint.provider.access_token
    if not token:
        raise ValueError('No access_token for {}. Unable to send {} to SHARE.'.format(preprint.provider, preprint))

    payload = {
        'data': {
            'type': 'NormalizedData',
            'attributes': {
                'tasks': [],
                'raw': None,
                'data': {'@graph': format_preprint(preprint)},
            },
        },
    }
    headers = {
        'Authorization': 'Bearer {}'.format(token),
        'Content-Type': 'application/vnd.api+json',
    }
    resp = requests.post('{}api/v2/normalizeddata/'.format(settings.SHARE_URL), json=payload, headers=headers)
    logger.debug(resp.content)
    resp.raise_for_status()
def migrate(dry_run=True):
    """Backfill ``params.preprint`` on preprint-related NodeLogs.

    Finds PREPRINT_FILE_UPDATED / PREPRINT_INITIATED logs missing
    ``params.preprint`` and stamps each with its node's preprint id.

    :param dry_run: when True (the default), report what would change
        without saving anything.
    """
    node_logs = list(NodeLog.find(
        Q('action', 'in', [NodeLog.PREPRINT_FILE_UPDATED, NodeLog.PREPRINT_INITIATED]) &
        Q('params.preprint', 'exists', False)
    ))
    logger.info('Preparing to migrate {} NodeLogs'.format(len(node_logs)))
    count = 0
    for log in node_logs:
        node_id = log.params.get('node')
        try:
            preprint = PreprintService.find_one(Q('node', 'eq', node_id))
        except NoResultsFound:
            # Orphaned log: its node never had a preprint.
            logger.error('Skipping {}, preprint not found for node: {}'.format(log._id, node_id))
            continue
        logger.info(
            'Migrating log - {} - to add params.preprint: {}, '.format(log._id, preprint._id)
        )
        log.params['preprint'] = preprint._id
        # BUG FIX: dry_run was previously ignored and log.save() ran
        # unconditionally, mutating the database even in a "dry" run.
        if not dry_run:
            log.save()
        count += 1
    logger.info('Migrated {} logs'.format(count))
def create(self, validated_data):
    """Create a preprint for a node, enforcing permissions and uniqueness.

    Validates the node, the requester's ADMIN permission, and the
    required primary_file/provider fields, then hands the remaining
    fields to ``self.update``.
    """
    node = Node.load(validated_data.pop('node', None))
    if not node:
        raise exceptions.NotFound('Unable to find Node with specified id.')
    if node.is_deleted:
        raise exceptions.ValidationError('Cannot create a preprint from a deleted node.')

    auth = get_user_auth(self.context['request'])
    if not node.has_permission(auth.user, permissions.ADMIN):
        raise exceptions.PermissionDenied

    primary_file = validated_data.pop('primary_file', None)
    if not primary_file:
        raise exceptions.ValidationError(detail='You must specify a valid primary_file to create a preprint.')

    provider = validated_data.pop('provider', None)
    if not provider:
        raise exceptions.ValidationError(detail='You must specify a valid provider to create a preprint.')

    # Only one preprint per (node, provider) pair is permitted.
    pair_query = Q('node', 'eq', node) & Q('provider', 'eq', provider)
    if PreprintService.find(pair_query).count():
        conflict = PreprintService.find_one(pair_query)
        raise Conflict(
            'Only one preprint per provider can be submitted for a node. Check `meta[existing_resource_id]`.',
            meta={'existing_resource_id': conflict._id})

    preprint = PreprintService(node=node, provider=provider)
    self.set_field(preprint.set_primary_file, primary_file, auth, save=True)
    preprint.node._has_abandoned_preprint = True
    preprint.node.save()

    return self.update(preprint, validated_data)
def _annotate_logs(logs, dry, include_service):
    """Stamp each log's params with its node's preprint id (and optionally
    the provider name); skip logs whose node has no preprint."""
    for log in logs:
        try:
            preprint = PreprintService.find_one(Q('node', 'eq', log.node))
        except NoResultsFound:
            # Node never had a preprint; nothing to annotate.
            continue
        params = {'preprint': {'id': preprint._id}}
        if include_service:
            params['service'] = {'name': preprint.provider.name}
        log.params.update(params)
        logging.info(
            'Updating log {} from node {}, with preprint id: {}'.format(
                log._id, log.node.title, preprint._id))
        if not dry:
            log.save()


def main(dry):
    """Backfill preprint (and service) params on preprint NodeLogs.

    :param dry: when truthy, log the intended updates without saving.
    """
    if dry:
        # Fix: logging.warn is a deprecated alias of logging.warning.
        logging.warning('DRY mode running')
    now = datetime.utcnow()
    # PREPRINT_INITIATED logs also record the service (provider) name.
    initiated_logs = NodeLog.find(
        Q('action', 'eq', NodeLog.PREPRINT_INITIATED) & Q('date', 'lt', now))
    _annotate_logs(initiated_logs, dry, include_service=True)
    # PREPRINT_FILE_UPDATED logs only need the preprint id.
    updated_logs = NodeLog.find(
        Q('action', 'eq', NodeLog.PREPRINT_FILE_UPDATED) & Q('date', 'lt', now))
    _annotate_logs(updated_logs, dry, include_service=False)
def on_preprint_updated(preprint_id):
    # WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable
    # transactions are implemented in View and Task application layers.
    from website.models import PreprintService

    # Send the preprint's normalized metadata graph to SHARE, using the
    # site-wide SHARE token; a no-op unless SHARE is fully configured.
    preprint = PreprintService.load(preprint_id)
    if not (settings.SHARE_URL and settings.SHARE_API_TOKEN):
        return

    body = {
        'created_at': datetime.datetime.utcnow().isoformat(),
        'normalized_data': {
            '@graph': format_preprint(preprint)
        },
    }
    resp = requests.post(
        '{}api/v2/normalizeddata/'.format(settings.SHARE_URL),
        json=body,
        headers={'Authorization': 'Bearer {}'.format(settings.SHARE_API_TOKEN)},
    )
    logger.debug(resp.content)
    resp.raise_for_status()
def main(dry):
    """Backfill preprint (and service) params on preprint NodeLogs.

    :param dry: when truthy, log the intended updates without saving.
    """
    if dry:
        # Fix: logging.warn is a deprecated alias of logging.warning.
        logging.warning('DRY mode running')
    now = datetime.utcnow()

    # PREPRINT_INITIATED logs also record the service (provider) name.
    initiated_logs = NodeLog.find(Q('action', 'eq', NodeLog.PREPRINT_INITIATED) & Q('date', 'lt', now))
    for log in initiated_logs:
        try:
            preprint = PreprintService.find_one(Q('node', 'eq', log.node))
            log.params.update({
                'preprint': {
                    'id': preprint._id
                },
                'service': {
                    'name': preprint.provider.name
                }
            })
            logging.info('Updating log {} from node {}, with preprint id: {}'.format(log._id, log.node.title, preprint._id))
            if not dry:
                log.save()
        except NoResultsFound:
            # Node never had a preprint; nothing to annotate.
            pass

    # PREPRINT_FILE_UPDATED logs only need the preprint id.
    updated_logs = NodeLog.find(Q('action', 'eq', NodeLog.PREPRINT_FILE_UPDATED) & Q('date', 'lt', now))
    for log in updated_logs:
        try:
            preprint = PreprintService.find_one(Q('node', 'eq', log.node))
            log.params.update({
                'preprint': {
                    'id': preprint._id
                }
            })
            logging.info('Updating log {} from node {}, with preprint id: {}'.format(log._id, log.node.title, preprint._id))
            if not dry:
                log.save()
        except NoResultsFound:
            pass
def find_preprint_provider(node):
    """Given a node, find the preprint and the service provider.

    :param node: the node to which a contributor or preprint author is added
    :return: the email template kind and provider name, or (None, None)
    """
    # TODO: fine-grained exception handling
    try:
        preprint = PreprintService.find_one(Q('node', 'eq', node._id))
        provider = preprint.provider
        # The OSF provider gets the generic template; others are branded.
        template = 'osf' if provider._id == 'osf' else 'branded'
        return template, provider.name
    except Exception:
        return None, None
def on_preprint_updated(preprint_id):
    # WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable
    # transactions are implemented in View and Task application layers.
    from website.models import PreprintService

    # When SHARE is configured, POST this preprint's normalized metadata
    # graph using the provider's per-provider access token.
    preprint = PreprintService.load(preprint_id)
    if settings.SHARE_URL:
        access_token = preprint.provider.access_token
        if not access_token:
            raise ValueError('No access_token for {}. Unable to send {} to SHARE.'.format(preprint.provider, preprint))

        endpoint = '{}api/v2/normalizeddata/'.format(settings.SHARE_URL)
        attributes = {
            'tasks': [],
            'raw': None,
            'data': {'@graph': format_preprint(preprint)},
        }
        resp = requests.post(
            endpoint,
            json={'data': {'type': 'NormalizedData', 'attributes': attributes}},
            headers={
                'Authorization': 'Bearer {}'.format(access_token),
                'Content-Type': 'application/vnd.api+json',
            },
        )
        logger.debug(resp.content)
        resp.raise_for_status()
def on_preprint_updated(preprint_id):
    # WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable
    # transactions are implemented in View and Task application layers.
    from website.models import PreprintService

    preprint = PreprintService.load(preprint_id)
    # Only push to SHARE when both the URL and the site token are set.
    if settings.SHARE_URL and settings.SHARE_API_TOKEN:
        endpoint = '{}api/v2/normalizeddata/'.format(settings.SHARE_URL)
        payload = {
            'created_at': datetime.datetime.utcnow().isoformat(),
            'normalized_data': {'@graph': format_preprint(preprint)},
        }
        auth_header = {'Authorization': 'Bearer {}'.format(settings.SHARE_API_TOKEN)}
        resp = requests.post(endpoint, json=payload, headers=auth_header)
        logger.debug(resp.content)
        resp.raise_for_status()
def validate_map_completeness():
    """Assert that every subject text used by any preprint has an entry
    in the PLOS->BePress mapping.

    :raises AssertionError: listing the unmapped subject texts, if any.
    """
    logger.info('Validating completeness of PLOS->BePress mapping')
    # Performance fix: the original evaluated the full PreprintService
    # scan twice (once for the subset check and again to format the
    # failure message). Build the subject set once.
    used_subjects = {
        s['text']
        for p in PreprintService.find()
        for hier in p.get_subjects()
        for s in hier
    }
    missing = used_subjects - set(PLOS_TO_BP_MAP.keys())
    assert not missing, 'Subjects not found in map: {}'.format(missing)
def get_queryset(self):
    """Return the preprints matching the query built from the request."""
    return PreprintService.find(self.get_query_from_request())
def perform_destroy(self, instance):
    """Remove the preprint; published preprints are immutable."""
    if not instance.is_published:
        PreprintService.remove_one(instance)
        return
    raise Conflict('Published preprints cannot be deleted.')