def archive(dataset, comment=False):
    """Archive a dataset"""
    if dataset.archived:
        log.warning('Dataset %s already archived, bumping date', dataset)
    dataset.archived = datetime.now()
    dataset.save()

    if comment:
        log.info('Posting comment for dataset %s...', dataset)
        lang = current_app.config['DEFAULT_LANGUAGE']
        title = current_app.config['ARCHIVE_COMMENT_TITLE']
        user_id = current_app.config['ARCHIVE_COMMENT_USER_ID']
        if user_id:
            with i18n.language(lang):
                msg = theme.render('comments/dataset_archived.txt')
                message = Message(content=msg, posted_by=user_id)
                discussion = Discussion(user=user_id, discussion=[message],
                                        subject=dataset, title=str(title))
                discussion.save()
        else:
            log.warning('ARCHIVE_COMMENT_USER_ID not set, skipping comment')

    log.info('Archived dataset %s', dataset)
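# --- Hypothetical usage sketch, not part of the original source ---
# A minimal illustration of how archive() above might be driven for a batch of
# datasets. It assumes a Flask application context is active (archive() reads
# current_app.config) and that Dataset is the MongoEngine model queried by the
# other snippets here. archive_many() and dataset_ids are illustrative names,
# not an existing udata API.
from udata.models import Dataset

def archive_many(dataset_ids, comment=True):
    for dataset_id in dataset_ids:
        dataset = Dataset.objects.get(id=dataset_id)  # standard MongoEngine lookup
        archive(dataset, comment=comment)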
def purge_datasets(self):
    for dataset in Dataset.objects(deleted__ne=None):
        log.info(f'Purging dataset {dataset}')
        # Remove followers
        Follow.objects(following=dataset).delete()
        # Remove issues
        Issue.objects(subject=dataset).delete()
        # Remove discussions
        Discussion.objects(subject=dataset).delete()
        # Remove activity
        Activity.objects(related_to=dataset).delete()
        # Remove topics' related dataset
        for topic in Topic.objects(datasets=dataset):
            datasets = topic.datasets
            datasets.remove(dataset)
            topic.update(datasets=datasets)
        # Remove HarvestItem references
        HarvestJob.objects(items__dataset=dataset).update(
            set__items__S__dataset=None)
        # Remove each dataset's resource's file
        storage = storages.resources
        for resource in dataset.resources:
            if resource.fs_filename is not None:
                storage.delete(resource.fs_filename)
        # Remove each dataset related community resource and its file
        community_resources = CommunityResource.objects(dataset=dataset)
        for community_resource in community_resources:
            if community_resource.fs_filename is not None:
                storage.delete(community_resource.fs_filename)
            community_resource.delete()
        # Remove dataset
        dataset.delete()
def purge_reuses(self):
    for reuse in Reuse.objects(deleted__ne=None):
        log.info('Purging reuse "{0}"'.format(reuse))
        # Remove followers
        Follow.objects(following=reuse).delete()
        # Remove issues
        Issue.objects(subject=reuse).delete()
        # Remove discussions
        Discussion.objects(subject=reuse).delete()
        # Remove activity
        Activity.objects(related_to=reuse).delete()
        reuse.delete()
def purge_reuses(self):
    for reuse in Reuse.objects(deleted__ne=None):
        log.info('Purging reuse "{0}"'.format(reuse))
        # Remove followers
        Follow.objects(following=reuse).delete()
        # Remove issues
        Issue.objects(subject=reuse).delete()
        # Remove discussions
        Discussion.objects(subject=reuse).delete()
        # Remove activity
        Activity.objects(related_to=reuse).delete()
        # Remove metrics
        Metrics.objects(object_id=reuse.id).delete()
        reuse.delete()
def quality(self):
    """Return a dict filled with metrics related to the inner quality
    of the dataset:

    * number of tags
    * description length
    * and so on
    """
    from udata.models import Discussion  # noqa: Prevent circular imports
    result = {}
    if not self.id:
        # Quality is only relevant on saved Datasets
        return result
    if self.next_update:
        result['frequency'] = self.frequency
        result['update_in'] = -(self.next_update - datetime.now()).days
    if self.tags:
        result['tags_count'] = len(self.tags)
    if self.description:
        result['description_length'] = len(self.description)
    if self.resources:
        result['has_resources'] = True
        result['has_only_closed_or_no_formats'] = all(
            resource.closed_or_no_format for resource in self.resources)
        result['has_unavailable_resources'] = not all(
            self.check_availability())
    discussions = Discussion.objects(subject=self)
    if discussions:
        result['discussions'] = len(discussions)
        result['has_untreated_discussions'] = not all(
            discussion.person_involved(self.owner)
            for discussion in discussions)
    result['score'] = self.compute_quality_score(result)
    return result
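# --- Hypothetical usage sketch, not part of the original source ---
# A minimal illustration of reading the dict built by quality() above. It
# assumes quality is callable as a plain method (in the real model it may be
# wrapped as a property) and that the dataset has been saved, since unsaved
# datasets return an empty dict.
dataset = Dataset.objects.first()
if dataset is not None:
    quality = dataset.quality()
    print(quality.get('score'), quality.get('tags_count'))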
def migrate(db):
    log.info('Processing Discussion user references.')

    discussions = Discussion.objects(user__ne=None).no_cache().all()
    remove_count = 0
    modif_count = 0
    for discussion in discussions:
        try:
            discussion.user.id
            discussion.subject.id
        except mongoengine.errors.DoesNotExist:
            discussion.delete()
            remove_count += 1
            continue
        valid_messages = []
        messages = discussion.discussion
        for message in messages:
            try:
                message.posted_by.id
                valid_messages.append(message)
            except mongoengine.errors.DoesNotExist:
                pass
        if len(valid_messages) != len(messages):
            discussion.discussion = valid_messages
            discussion.save()
            modif_count += 1
    log.info(
        f'Modified {modif_count} Discussion objects, deleted {remove_count}')

    log.info('Processing Badges user references.')

    organizations = Organization.objects.filter(badges__0__exists=True)
    count = 0
    for org in organizations:
        for badge in org.badges:
            try:
                badge.created_by and badge.created_by.id
            except mongoengine.errors.DoesNotExist:
                count += 1
                badge.created_by = None
                org.save()
    log.info(f'Modified {count} badges')

    log.info('Processing Request user references.')

    organizations = Organization.objects.filter(requests__0__exists=True)
    count = 0
    for org in organizations:
        for request in org.requests:
            try:
                request.handled_by and request.handled_by.id
            except mongoengine.errors.DoesNotExist:
                count += 1
                request.handled_by = None
                org.save()
    log.info(f'Modified {count} requests')
def purge_reuses(self):
    for reuse in Reuse.objects(deleted__ne=None):
        log.info(f'Purging reuse {reuse}')
        # Remove followers
        Follow.objects(following=reuse).delete()
        # Remove discussions
        Discussion.objects(subject=reuse).delete()
        # Remove activity
        Activity.objects(related_to=reuse).delete()
        # Remove transfers
        Transfer.objects(subject=reuse).delete()
        # Remove reuse's logo in all sizes
        if reuse.image.filename is not None:
            storage = storages.images
            storage.delete(reuse.image.filename)
            storage.delete(reuse.image.original)
            for key, value in reuse.image.thumbnails.items():
                storage.delete(value)
        reuse.delete()
def purge_datasets(self):
    for dataset in Dataset.objects(deleted__ne=None):
        log.info('Purging dataset "{0}"'.format(dataset))
        # Remove followers
        Follow.objects(following=dataset).delete()
        # Remove issues
        Issue.objects(subject=dataset).delete()
        # Remove discussions
        Discussion.objects(subject=dataset).delete()
        # Remove activity
        Activity.objects(related_to=dataset).delete()
        # Remove metrics
        Metrics.objects(object_id=dataset.id).delete()
        # Remove topics' related dataset
        for topic in Topic.objects(datasets=dataset):
            datasets = topic.datasets
            datasets.remove(dataset)
            topic.update(datasets=datasets)
        # Remove
        dataset.delete()
def purge_datasets(self):
    for dataset in Dataset.objects(deleted__ne=None):
        log.info('Purging dataset "{0}"'.format(dataset))
        # Remove followers
        Follow.objects(following=dataset).delete()
        # Remove issues
        Issue.objects(subject=dataset).delete()
        # Remove discussions
        Discussion.objects(subject=dataset).delete()
        # Remove activity
        Activity.objects(related_to=dataset).delete()
        # Remove topics' related dataset
        for topic in Topic.objects(datasets=dataset):
            datasets = topic.datasets
            datasets.remove(dataset)
            topic.update(datasets=datasets)
        # Remove HarvestItem references
        HarvestJob.objects(items__dataset=dataset).update(
            set__items__S__dataset=None)
        # Remove
        dataset.delete()
def purge_datasets(self):
    for dataset in Dataset.objects(deleted__ne=None):
        log.info(f'Purging dataset {dataset}')
        # Remove followers
        Follow.objects(following=dataset).delete()
        # Remove discussions
        Discussion.objects(subject=dataset).delete()
        # Remove activity
        Activity.objects(related_to=dataset).delete()
        # Remove topics' related dataset
        for topic in Topic.objects(datasets=dataset):
            datasets = topic.datasets
            datasets.remove(dataset)
            topic.update(datasets=datasets)
        # Remove HarvestItem references
        HarvestJob.objects(items__dataset=dataset).update(
            set__items__S__dataset=None)
        # Remove associated Transfers
        Transfer.objects(subject=dataset).delete()
        # Remove each dataset's resource's file
        storage = storages.resources
        for resource in dataset.resources:
            if resource.fs_filename is not None:
                storage.delete(resource.fs_filename)
            # Not removing the resource from dataset.resources
            # with `dataset.remove_resource` as removing elements
            # from a list while iterating causes random effects.
            Dataset.on_resource_removed.send(Dataset, document=dataset,
                                             resource_id=resource.id)
        # Remove each dataset related community resource and its file
        community_resources = CommunityResource.objects(dataset=dataset)
        for community_resource in community_resources:
            if community_resource.fs_filename is not None:
                storage.delete(community_resource.fs_filename)
            community_resource.delete()
        # Remove dataset
        dataset.delete()
def discussions_notifications(user):
    '''Notify user about open discussions'''
    orgs = [o for o in user.organizations if o.is_member(user)]
    datasets = Dataset.objects.owned_by(user, *orgs)
    reuses = Reuse.objects.owned_by(user, *orgs)
    notifications = []

    for discussion in Discussion.objects(
            subject__in=list(datasets) + list(reuses),
            closed__exists=False):
        notifications.append((discussion.created, {
            'id': discussion.id,
            'title': discussion.title,
            'subject': {
                'id': discussion.subject.id,
                'type': discussion.subject.__class__.__name__.lower(),
            }
        }))

    return notifications
def count_discussions(self):
    from udata.models import Discussion
    self.metrics['discussions'] = Discussion.objects(
        subject=self, closed=None).count()
    self.save()
def get(self):
    '''List all current user pending notifications'''
    user = current_user._get_current_object()
    notifications = []

    orgs = [o for o in user.organizations if o.is_member(user)]
    datasets = Dataset.objects.owned_by(user, *orgs)
    reuses = Reuse.objects.owned_by(user, *orgs)

    # TODO: use polymorph field
    # Fetch user open issues
    for issue in Issue.objects(subject__in=list(datasets) + list(reuses)):
        notifications.append({
            'type': 'issue',
            'created_on': issue.created,
            'details': {
                'id': str(issue.id),
                'title': issue.title,
                'subject': {
                    'id': str(issue.subject.id),
                    'type': issue.subject.__class__.__name__.lower(),
                }
            }
        })

    # Fetch user open discussions
    for discussion in Discussion.objects(
            subject__in=list(datasets) + list(reuses)):
        notifications.append({
            'type': 'discussion',
            'created_on': discussion.created,
            'details': {
                'id': str(discussion.id),
                'title': discussion.title,
                'subject': {
                    'id': str(discussion.subject.id),
                    'type': discussion.subject.__class__.__name__.lower(),
                }
            }
        })

    # Fetch pending membership requests
    for org in orgs:
        for request in org.pending_requests:
            notifications.append({
                'type': 'membership_request',
                'created_on': request.created,
                'details': {
                    'organization': org.id,
                    'user': {
                        'id': str(request.user.id),
                        'fullname': request.user.fullname,
                        'avatar': str(request.user.avatar)
                    }
                }
            })

    # Fetch pending transfer requests
    for transfer in Transfer.objects(recipient__in=[user] + orgs,
                                     status='pending'):
        notifications.append({
            'type': 'transfer_request',
            'created_on': transfer.created,
            'details': {
                'id': str(transfer.id),
                'subject': {
                    'class': transfer.subject.__class__.__name__.lower(),
                    'id': str(transfer.subject.id)
                }
            }
        })

    return notifications
def get(self):
    """List all current user pending notifications"""
    user = current_user._get_current_object()
    notifications = []

    orgs = [o for o in user.organizations if o.is_member(user)]
    datasets = Dataset.objects.owned_by(user, *orgs)
    reuses = Reuse.objects.owned_by(user, *orgs)

    # TODO: use polymorph field
    # Fetch user open issues
    for issue in Issue.objects(subject__in=list(datasets) + list(reuses)):
        notifications.append(
            {
                "type": "issue",
                "created_on": issue.created,
                "details": {
                    "id": str(issue.id),
                    "title": issue.title,
                    "subject": {"id": str(issue.subject.id), "type": issue.subject.__class__.__name__.lower()},
                },
            }
        )

    # Fetch user open discussions
    for discussion in Discussion.objects(subject__in=list(datasets) + list(reuses)):
        notifications.append(
            {
                "type": "discussion",
                "created_on": discussion.created,
                "details": {
                    "id": str(discussion.id),
                    "title": discussion.title,
                    "subject": {
                        "id": str(discussion.subject.id),
                        "type": discussion.subject.__class__.__name__.lower(),
                    },
                },
            }
        )

    # Fetch pending membership requests
    for org in orgs:
        for request in org.pending_requests:
            notifications.append(
                {
                    "type": "membership_request",
                    "created_on": request.created,
                    "details": {
                        "organization": org.id,
                        "user": {
                            "id": str(request.user.id),
                            "fullname": request.user.fullname,
                            "avatar": str(request.user.avatar),
                        },
                    },
                }
            )

    # Fetch pending transfer requests
    for transfer in Transfer.objects(recipient__in=[user] + orgs, status="pending"):
        notifications.append(
            {
                "type": "transfer_request",
                "created_on": transfer.created,
                "details": {
                    "id": str(transfer.id),
                    "subject": {
                        "class": transfer.subject.__class__.__name__.lower(),
                        "id": str(transfer.subject.id),
                    },
                },
            }
        )

    return notifications