def purge_organizations():
    """Hard-delete every organization flagged as deleted, along with its related records."""
    deleted_orgs = Organization.objects(deleted__ne=None)
    for org in deleted_orgs:
        log.info('Purging organization "{0}"'.format(org))
        # Drop follower links, activity and metrics before the document itself.
        FollowOrg.objects(following=org).delete()
        Activity.objects(related_to=org).delete()
        Activity.objects(organization=org).delete()
        Metrics.objects(object_id=org.id).delete()
        org.delete()
def purge_organizations(self):
    """Hard-delete every organization flagged as deleted, along with its related records."""
    for org in Organization.objects(deleted__ne=None):
        log.info('Purging organization "{0}"'.format(org))
        # Followers first, then activity, then metrics.
        FollowOrg.objects(following=org).delete()
        Activity.objects(related_to=org).delete()
        Activity.objects(organization=org).delete()
        Metrics.objects(object_id=org.id).delete()
        # Finally remove the organization document itself.
        org.delete()
def purge_reuses(self):
    """Hard-delete every reuse flagged as deleted, plus all records tied to it."""
    deleted_reuses = Reuse.objects(deleted__ne=None)
    for item in deleted_reuses:
        log.info('Purging reuse "{0}"'.format(item))
        # Followers, issues, activity and metrics go before the reuse itself.
        FollowReuse.objects(following=item).delete()
        ReuseIssue.objects(subject=item).delete()
        Activity.objects(related_to=item).delete()
        Metrics.objects(object_id=item.id).delete()
        item.delete()
def purge_datasets():
    """Hard-delete every dataset flagged as deleted, plus all records tied to it."""
    for ds in Dataset.objects(deleted__ne=None):
        log.info('Purging dataset "{0}"'.format(ds))
        # Related documents are removed before the dataset itself.
        FollowDataset.objects(following=ds).delete()
        DatasetIssue.objects(subject=ds).delete()
        Activity.objects(related_to=ds).delete()
        Metrics.objects(object_id=ds.id).delete()
        ds.delete()
def purge_datasets(self):
    """Hard-delete every dataset flagged as deleted, plus all records tied to it."""
    deleted = Dataset.objects(deleted__ne=None)
    for ds in deleted:
        log.info('Purging dataset "{0}"'.format(ds))
        # Related documents are removed before the dataset itself.
        FollowDataset.objects(following=ds).delete()
        DatasetIssue.objects(subject=ds).delete()
        Activity.objects(related_to=ds).delete()
        Metrics.objects(object_id=ds.id).delete()
        # Finally remove the dataset document.
        ds.delete()
def purge_reuses(self):
    """Hard-delete soft-deleted reuses and everything referencing them."""
    for item in Reuse.objects(deleted__ne=None):
        log.info('Purging reuse "{0}"'.format(item))
        # Clean up followers, issues, discussions, activity and metrics
        # before dropping the reuse document itself.
        Follow.objects(following=item).delete()
        Issue.objects(subject=item).delete()
        Discussion.objects(subject=item).delete()
        Activity.objects(related_to=item).delete()
        Metrics.objects(object_id=item.id).delete()
        item.delete()
def get(self, id):
    """Fetch metrics for an object given its ID.

    Query args: ``day`` (a single day), ``start``/``end`` (a date range,
    ``end`` defaulting to today) and ``cumulative`` (when falsy, the
    cumulative counters are converted to per-day deltas).
    Returns the marshalled list of metrics; 404s when nothing matches.
    """
    if id == "site":
        object_id = current_site.id
    else:
        try:
            object_id = ObjectId(id)
        except Exception:
            # Not a valid ObjectId; fall back to the raw identifier.
            # (Bare `except:` would also trap SystemExit/KeyboardInterrupt.)
            object_id = id
    queryset = Metrics.objects(object_id=object_id).order_by("-date")
    args = parser.parse_args()
    if args.get("day"):
        metrics = [queryset(date=args["day"]).first_or_404()]
    elif args.get("start"):
        # parse_args() sets missing arguments to None, so the key is always
        # present and dict.get's second argument is never used; `or` applies
        # the fallback when 'end' was not supplied.
        end = args.get("end") or date.today().isoformat()
        metrics = list(queryset(date__gte=args["start"], date__lte=end))
    else:
        metrics = [queryset.first_or_404()]
    if not args.get("cumulative") and metrics:
        # Turn cumulative data into daily counts based on the first
        # result. Might return negative values if there is a drop.
        reference_values = metrics[-1].values.copy()
        for metric in reversed(metrics):
            current_values = metric.values.copy()
            metric.values = {
                name: count - reference_values[name]
                for name, count in current_values.iteritems()
                if name in reference_values
            }
            reference_values = current_values
    return marshal(metrics, metrics_fields)
def get(self, id):
    '''Fetch metrics for an object given its ID.

    Query args: 'day' (a single day), 'start'/'end' (a date range, 'end'
    defaulting to today) and 'cumulative' (when falsy, cumulative counters
    are converted to per-day deltas). 404s when nothing matches.
    '''
    if id == 'site':
        object_id = current_site.id
    else:
        try:
            object_id = ObjectId(id)
        except Exception:
            # Not a valid ObjectId; fall back to the raw identifier.
            # (Bare `except:` would also trap SystemExit/KeyboardInterrupt.)
            object_id = id
    queryset = Metrics.objects(object_id=object_id).order_by('-date')
    args = parser.parse_args()
    if args.get('day'):
        metrics = [queryset(date=args['day']).first_or_404()]
    elif args.get('start'):
        # parse_args() sets missing arguments to None, so the key is always
        # present and dict.get's second argument is never used; `or` applies
        # the fallback when 'end' was not supplied.
        end = args.get('end') or date.today().isoformat()
        metrics = list(queryset(date__gte=args['start'], date__lte=end))
    else:
        metrics = [queryset.first_or_404()]
    if not args.get('cumulative') and metrics:
        # Turn cumulative data into daily counts based on the first
        # result. Might return negative values if there is a drop.
        reference_values = metrics[-1].values.copy()
        for metric in reversed(metrics):
            current_values = metric.values.copy()
            metric.values = {
                name: count - reference_values[name]
                for name, count in current_values.iteritems()
                if name in reference_values
            }
            reference_values = current_values
    return marshal(metrics, metrics_fields)
def get(self, id):
    '''Fetch metrics for an object given its ID.

    Query args: 'day' (a single day), 'start'/'end' (a date range, 'end'
    defaulting to today) and 'cumulative' (when falsy, cumulative counters
    are converted to per-day deltas). Returns the raw metrics list
    (marshalling is presumably handled by a decorator); 404s when nothing
    matches.
    '''
    if id == 'site':
        object_id = current_site.id
    else:
        try:
            object_id = ObjectId(id)
        except Exception:
            # Not a valid ObjectId; fall back to the raw identifier.
            # (Bare `except:` would also trap SystemExit/KeyboardInterrupt.)
            object_id = id
    queryset = Metrics.objects(object_id=object_id).order_by('-date')
    args = parser.parse_args()
    if args.get('day'):
        metrics = [queryset(date=args['day']).first_or_404()]
    elif args.get('start'):
        # parse_args() sets missing arguments to None, so the key is always
        # present and dict.get's second argument is never used; `or` applies
        # the fallback when 'end' was not supplied.
        end = args.get('end') or date.today().isoformat()
        metrics = list(queryset(date__gte=args['start'], date__lte=end))
    else:
        metrics = [queryset.first_or_404()]
    if not args.get('cumulative') and metrics:
        # Turn cumulative data into daily counts based on the first
        # result. Might return negative values if there is a drop.
        reference_values = metrics[-1].values.copy()
        for metric in reversed(metrics):
            current_values = metric.values.copy()
            metric.values = {
                name: count - reference_values[name]
                for name, count in current_values.iteritems()
                if name in reference_values
            }
            reference_values = current_values
    return metrics
def purge_organizations(self):
    """Hard-delete soft-deleted organizations and reindex their datasets."""
    for org in Organization.objects(deleted__ne=None):
        log.info('Purging organization "{0}"'.format(org))
        # Drop everything referencing the organization.
        Follow.objects(following=org).delete()
        Activity.objects(related_to=org).delete()
        Activity.objects(organization=org).delete()
        Metrics.objects(object_id=org.id).delete()
        # Remember dataset ids now: the reverse reference disappears
        # once the organization is gone.
        dataset_ids = [d.id for d in Dataset.objects(organization=org)]
        org.delete()
        # Reindex the datasets that were linked to the organization.
        for ds in Dataset.objects(id__in=dataset_ids):
            reindex(ds)
def purge_datasets(self):
    """Hard-delete soft-deleted datasets and everything referencing them."""
    for ds in Dataset.objects(deleted__ne=None):
        log.info('Purging dataset "{0}"'.format(ds))
        # Followers, issues, discussions, activity and metrics first.
        FollowDataset.objects(following=ds).delete()
        DatasetIssue.objects(subject=ds).delete()
        DatasetDiscussion.objects(subject=ds).delete()
        Activity.objects(related_to=ds).delete()
        Metrics.objects(object_id=ds.id).delete()
        # Detach the dataset from any topic that still lists it.
        for topic in Topic.objects(datasets=ds):
            remaining = topic.datasets
            remaining.remove(ds)
            topic.update(datasets=remaining)
        # Finally remove the dataset document itself.
        ds.delete()
def get(self, id, period=None, names=None):
    '''Fetch metrics for an object given its ID.

    ``period`` may parse to a single day (string) or a (start, end)
    pair; without it only the latest metrics document is returned.
    ``names`` is accepted for interface compatibility but is unused here.
    404s when nothing matches.
    '''
    try:
        object_id = ObjectId(id)
    except Exception:
        # Not a valid ObjectId; fall back to the raw identifier.
        # (Bare `except:` would also trap SystemExit/KeyboardInterrupt.)
        object_id = id
    queryset = Metrics.objects(object_id=object_id).order_by('-date')
    if period:
        period = parse_period(period)
        if isinstance(period, basestring):
            # A single day: return the matching document or 404.
            result = queryset(date=period).first_or_404()
        else:
            # A (start, end) pair: return every document in the range.
            result = list(queryset(date__gte=period[0], date__lte=period[1]))
    else:
        result = queryset.first_or_404()
    return marshal(result, metrics_fields)
def get(self, id):
    '''Fetch metrics for an object given its ID.

    Query args: 'day' (a single day) or 'start'/'end' (a date range,
    'end' defaulting to today); otherwise only the latest metrics
    document is returned. 404s when nothing matches.
    '''
    if id == 'site':
        object_id = current_site.id
    else:
        try:
            object_id = ObjectId(id)
        except Exception:
            # Not a valid ObjectId; fall back to the raw identifier.
            # (Bare `except:` would also trap SystemExit/KeyboardInterrupt.)
            object_id = id
    queryset = Metrics.objects(object_id=object_id).order_by('-date')
    args = parser.parse_args()
    if args.get('day'):
        result = [queryset(date=args['day']).first_or_404()]
    elif args.get('start'):
        # parse_args() sets missing arguments to None, so the key is always
        # present and dict.get's second argument is never used; `or` applies
        # the fallback when 'end' was not supplied.
        end = args.get('end') or date.today().isoformat()
        result = list(queryset(date__gte=args['start'], date__lte=end))
    else:
        result = [queryset.first_or_404()]
    return marshal(result, metrics_fields)
def get(self, id):
    """Fetch metrics for an object given its ID.

    Query args: ``day`` (a single day) or ``start``/``end`` (a date
    range, ``end`` defaulting to today); otherwise only the latest
    metrics document is returned. 404s when nothing matches.
    """
    if id == "site":
        object_id = current_site.id
    else:
        try:
            object_id = ObjectId(id)
        except Exception:
            # Not a valid ObjectId; fall back to the raw identifier.
            # (Bare `except:` would also trap SystemExit/KeyboardInterrupt.)
            object_id = id
    queryset = Metrics.objects(object_id=object_id).order_by("-date")
    args = parser.parse_args()
    if args.get("day"):
        result = [queryset(date=args["day"]).first_or_404()]
    elif args.get("start"):
        # parse_args() sets missing arguments to None, so the key is always
        # present and dict.get's second argument is never used; `or` applies
        # the fallback when 'end' was not supplied.
        end = args.get("end") or date.today().isoformat()
        result = list(queryset(date__gte=args["start"], date__lte=end))
    else:
        result = [queryset.first_or_404()]
    return marshal(result, metrics_fields)