def run(self, *args, **kwargs):
    """
    Run every saved search subscription against Solr and notify each
    subscriber when new results have appeared since the last run.
    """
    from panda.models import SearchSubscription

    log = logging.getLogger(self.name)
    log.info('Running subscribed searches')

    for subscription in SearchSubscription.objects.all():
        log.info('Running subscription: %s' % subscription)

        # Snapshot the previous run time (second precision, naive) before
        # advancing it, so the Solr range query covers exactly the gap.
        previous_run = subscription.last_run.replace(microsecond=0, tzinfo=None)
        since = previous_run.isoformat('T')

        subscription.last_run = now()
        subscription.save()

        solr_query = 'last_modified:[%s TO *] AND (%s)' % (since + 'Z', subscription.query)

        # Narrow the search to a dataset or category scope when one is set.
        if subscription.dataset:
            solr_query += ' dataset_slug:%s' % (subscription.dataset.slug)
        elif subscription.category:
            dataset_slugs = subscription.category.datasets.values_list('slug', flat=True)
            solr_query += ' dataset_slug:(%s)' % ' '.join(dataset_slugs)

        # Only the hit count is needed, so fetch zero rows.
        response = solr.query(
            settings.SOLR_DATA_CORE,
            solr_query,
            offset=0,
            limit=0
        )

        count = response['response']['numFound']
        log.info('Found %i new results' % count)

        if count:
            # Build a client-side hash URL matching the subscription scope.
            if subscription.dataset:
                url = '#dataset/%s/search/%s/%s' % (subscription.dataset.slug, subscription.query_url, since)
            elif subscription.category:
                url = '#search/%s/%s/%s' % (subscription.category.slug, subscription.query, since)
            else:
                url = '#search/all/%s/%s' % (subscription.query, since)

            notify(
                subscription.user,
                'subscription_results',
                'info',
                url=url,
                extra_context={
                    'query': subscription.query,
                    'query_url': subscription.query_url,
                    'category': subscription.category,
                    'related_dataset': subscription.dataset,
                    'count': count,
                    'since': since
                }
            )

    log.info('Finished running subscribed searches')
def run(self, *args, **kwargs):
    """
    Execute all stored search subscriptions, notifying users whose
    queries have matched new data since their subscription last ran.
    """
    from panda.models import SearchSubscription

    log = logging.getLogger(self.name)
    log.info('Running subscribed searches')

    subscriptions = SearchSubscription.objects.all()

    for sub in subscriptions:
        log.info('Running subscription: %s' % sub)

        # Record the window start from the previous run, truncated to whole
        # seconds and made naive, then immediately stamp this run.
        since = sub.last_run.replace(microsecond=0, tzinfo=None).isoformat('T')
        sub.last_run = now()
        sub.save()

        solr_query = 'last_modified:[%s TO *] AND (%s)' % (since + 'Z', sub.query)

        # Constrain to the subscription's dataset, or to every dataset in
        # its category, when either scope is present.
        if sub.dataset:
            solr_query += ' dataset_slug:%s' % (sub.dataset.slug)
        elif sub.category:
            slugs = sub.category.datasets.values_list('slug', flat=True)
            solr_query += ' dataset_slug:(%s)' % ' '.join(slugs)

        # A zero-row query: we only need numFound.
        response = solr.query(settings.SOLR_DATA_CORE, solr_query, offset=0, limit=0)
        count = response['response']['numFound']

        log.info('Found %i new results' % count)

        if not count:
            continue

        # Pick the hash-fragment URL that reproduces this search client-side.
        if sub.dataset:
            url = '#dataset/%s/search/%s/%s' % (sub.dataset.slug, sub.query_url, since)
        elif sub.category:
            url = '#search/%s/%s/%s' % (sub.category.slug, sub.query, since)
        else:
            url = '#search/all/%s/%s' % (sub.query, since)

        context = {
            'query': sub.query,
            'query_url': sub.query_url,
            'category': sub.category,
            'related_dataset': sub.dataset,
            'count': count,
            'since': since
        }

        notify(sub.user, 'subscription_results', 'info', url=url, extra_context=context)

    log.info('Finished running subscribed searches')
def send_notifications(self, dataset, query, retval, einfo):
    """
    Send user notifications that this dataset export task has finished.

    :param dataset: the Dataset that was exported; its ``current_task``
        status record is updated here.
    :param query: the query (if any) that scoped the export; passed to
        the notification template.
    :param retval: on success, the export's filename; on failure, the
        exception value.
    :param einfo: exception info when the task failed (either an object
        with a ``traceback`` attribute or a ``sys.exc_info()`` tuple),
        otherwise falsy.
    """
    from panda.models import Export

    task_status = dataset.current_task
    export = None
    extra_context = {
        'query': query,
        'related_dataset': dataset
    }
    url = None

    if einfo:
        # einfo may be an ExceptionInfo-like object or a raw exc_info tuple.
        if hasattr(einfo, 'traceback'):
            tb = einfo.traceback
        else:
            tb = ''.join(traceback.format_tb(einfo[2]))

        # Wrap the user-facing status message in ugettext for translation,
        # matching the sibling export/search notification handlers.
        task_status.exception(
            ugettext('Export failed'),
            u'%s\n\nTraceback:\n%s' % (unicode(retval), tb)
        )

        template_prefix = 'export_failed'
        extra_context['error'] = unicode(retval)
        extra_context['traceback'] = tb
        notification_type = 'Error'
    elif self.is_aborted():
        template_prefix = 'export_aborted'
        notification_type = 'Info'
    else:
        task_status.complete(ugettext('Export complete'))

        # Record the finished export so it appears in the user's export list.
        export = Export.objects.create(
            filename=retval,
            original_filename=retval,
            size=os.path.getsize(os.path.join(settings.EXPORT_ROOT, retval)),
            creator=task_status.creator,
            creation_date=task_status.start,
            dataset=dataset)

        extra_context['related_export'] = export
        url = '#export/%i' % export.id

        template_prefix = 'export_complete'
        notification_type = 'Info'

    if task_status.creator:
        notify(
            task_status.creator,
            template_prefix,
            notification_type,
            url,
            extra_context=extra_context
        )
def send_notifications(self, query, task_status, retval, einfo):
    """
    Notify the task creator that this search-export task has finished,
    recording success, abort, or failure on the task status.
    """
    from panda.models import Export

    export = None
    url = None
    extra_context = {
        'query': query
    }

    if einfo:
        # einfo arrives either as a raw exc_info tuple or as an object
        # carrying a preformatted traceback string.
        if isinstance(einfo, tuple):
            tb = '\n'.join(format_tb(einfo[2]))
        else:
            tb = einfo.traceback

        task_status.exception(
            ugettext('Export failed'),
            u'%s\n\nTraceback:\n%s' % (unicode(retval), tb)
        )

        extra_context['error'] = unicode(retval)
        extra_context['traceback'] = tb

        template_prefix = 'export_search_failed'
        notification_type = 'Error'
    elif self.is_aborted():
        template_prefix = 'export_search_aborted'
        notification_type = 'Info'
    else:
        task_status.complete(ugettext('Export complete'))

        # Persist the export record; a search export has no parent dataset.
        export = Export.objects.create(
            filename=retval,
            original_filename=retval,
            size=os.path.getsize(os.path.join(settings.EXPORT_ROOT, retval)),
            creator=task_status.creator,
            creation_date=task_status.start,
            dataset=None)

        extra_context['related_export'] = export
        url = '#export/%i' % export.id

        template_prefix = 'export_search_complete'
        notification_type = 'Info'

    if task_status.creator:
        notify(
            task_status.creator,
            template_prefix,
            notification_type,
            url,
            extra_context=extra_context
        )
def send_notifications(self, dataset, query, retval, einfo):
    """
    Notify the task creator that this dataset export has finished,
    updating the dataset's current task status along the way.
    """
    from panda.models import Export

    task_status = dataset.current_task
    export = None
    url = None

    extra_context = {
        "query": query,
        "related_dataset": dataset,
    }

    if einfo:
        # Accept either an ExceptionInfo-style object or an exc_info tuple.
        tb = einfo.traceback if hasattr(einfo, "traceback") \
            else "".join(traceback.format_tb(einfo[2]))

        task_status.exception(
            ugettext("Export failed"),
            u"%s\n\nTraceback:\n%s" % (unicode(retval), tb),
        )

        extra_context["error"] = unicode(retval)
        extra_context["traceback"] = tb

        template_prefix = "export_failed"
        notification_type = "Error"
    elif self.is_aborted():
        template_prefix = "export_aborted"
        notification_type = "Info"
    else:
        task_status.complete(ugettext("Export complete"))

        # Create the Export record pointing at the file written on disk.
        export_path = os.path.join(settings.EXPORT_ROOT, retval)
        export = Export.objects.create(
            filename=retval,
            original_filename=retval,
            size=os.path.getsize(export_path),
            creator=task_status.creator,
            creation_date=task_status.start,
            dataset=dataset,
        )

        extra_context["related_export"] = export
        url = "#export/%i" % export.id

        template_prefix = "export_complete"
        notification_type = "Info"

    if task_status.creator:
        notify(
            task_status.creator,
            template_prefix,
            notification_type,
            url,
            extra_context=extra_context,
        )
def send_notifications(self, dataset, retval, einfo):
    """
    Notify the task creator that this dataset import has finished,
    recording failure, abort, or completion on the task status.
    """
    task_status = dataset.current_task

    extra_context = {
        'related_dataset': dataset
    }

    if einfo:
        # einfo is either an object exposing a preformatted traceback or a
        # raw sys.exc_info() tuple.
        tb = einfo.traceback if hasattr(einfo, 'traceback') \
            else ''.join(traceback.format_tb(einfo[2]))

        task_status.exception(
            'Import failed',
            u'%s\n\nTraceback:\n%s' % (unicode(retval), tb)
        )

        extra_context['error'] = unicode(retval)
        extra_context['traceback'] = tb

        template_prefix = 'import_failed'
        notification_type = 'Error'
    elif self.is_aborted():
        template_prefix = 'import_aborted'
        notification_type = 'Info'
    else:
        task_status.complete('Import complete')

        # Pass the import's type summary through to the template.
        extra_context['type_summary'] = retval.summarize()

        template_prefix = 'import_complete'
        notification_type = 'Info'

    if task_status.creator:
        notify(
            task_status.creator,
            template_prefix,
            notification_type,
            url='#dataset/%s' % dataset.slug,
            extra_context=extra_context
        )
def send_notifications(self, dataset, retval, einfo):
    """
    Send user notifications that this reindex task has finished.

    :param dataset: the Dataset that was reindexed; its ``current_task``
        status record is updated here.
    :param retval: on success, an object exposing ``summarize()`` with
        the reindexed type summary; on failure, the exception value.
    :param einfo: exception info when the task failed (either an object
        with a ``traceback`` attribute or a ``sys.exc_info()`` tuple),
        otherwise falsy.
    """
    task_status = dataset.current_task
    extra_context = {"related_dataset": dataset}

    if einfo:
        # einfo may be an ExceptionInfo-like object or a raw exc_info tuple.
        if hasattr(einfo, "traceback"):
            tb = einfo.traceback
        else:
            tb = "".join(traceback.format_tb(einfo[2]))

        task_status.exception("Reindex failed", u"%s\n\nTraceback:\n%s" % (unicode(retval), tb))

        template_prefix = "reindex_failed"
        notification_type = "Error"
    elif self.is_aborted():
        template_prefix = "reindex_aborted"
        notification_type = "Info"
    else:
        # BUGFIX: previously reported "Import complete" — a copy-paste
        # from the import task; this is the reindex task.
        task_status.complete("Reindex complete")

        template_prefix = "reindex_complete"
        extra_context["type_summary"] = retval.summarize()
        notification_type = "Info"

    if task_status.creator:
        notify(
            task_status.creator,
            template_prefix,
            notification_type,
            url="#dataset/%s" % dataset.slug,
            extra_context=extra_context,
        )