def store_aids_live_count():
    logger.info("Starting stats 'aid live count' task")
    # main website
    aids_live_count = Aid.objects.live().count()
    log_event('aid', 'live_count', source='aides-territoires', value=aids_live_count)  # noqa
    # all search pages (PP)
    for search_page in SearchPage.objects.all():
        search_page_aids_live_count = search_page.get_base_queryset().count()
        log_event('aid', 'live_count', source=search_page.slug, value=search_page_aids_live_count)  # noqa
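# Every snippet in this section funnels into the project's `log_event`
# helper. A minimal sketch of what such a helper could look like,
# assuming a simple `Event` model with category/event/meta/source/value
# fields -- the model, field names, and module are assumptions for
# illustration, not the project's actual implementation:
from django.db import models


class Event(models.Model):
    """Hypothetical event store backing `log_event`."""
    category = models.CharField(max_length=128)
    event = models.CharField(max_length=128)
    meta = models.TextField(blank=True, default='')
    source = models.CharField(max_length=256, blank=True, default='')
    value = models.IntegerField(null=True, blank=True)
    date_created = models.DateTimeField(auto_now_add=True)


def log_event(category, event, meta='', source='', value=None):
    """Persist one stats event; mirrors the call sites in this section."""
    Event.objects.create(category=category, event=event,
                         meta=meta, source=source, value=value)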
def handle(self, *args, **options):
    alerts = self.get_alerts()
    alerted_alerts = []
    for alert in alerts:
        new_aids = list(alert.get_new_aids())
        if new_aids:
            alerted_alerts.append(alert.token)
            self.send_alert(alert, new_aids)
            logger.info(
                'Sending alert email to {}: {} new aids'.format(
                    alert.email, len(new_aids)))
    updated = Alert.objects \
        .filter(token__in=alerted_alerts) \
        .update(latest_alert_date=timezone.now())
    self.stdout.write('{} alerts sent'.format(updated))
    log_event('alert', 'sent', source='send_alerts', value=updated)
    return
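# The handler above belongs to a management command; assuming it is
# registered as `send_alerts` (the `source='send_alerts'` value hints
# at that name), it can be triggered from code, tests, or a scheduled
# wrapper:
from django.core.management import call_command

call_command('send_alerts')  # typically run daily via cron or a scheduler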
def get(self, request, *args, **kwargs):
    response = super().get(request, *args, **kwargs)
    log_event('aid', 'viewed', meta=self.object.slug, value=1)
    return response
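# With events stored as sketched earlier, the 'viewed' events logged by
# this detail view can feed a simple dashboard query. This sketch
# assumes the hypothetical `Event` model from above, where `meta` holds
# the aid slug:
from datetime import timedelta

from django.db.models import Count
from django.utils import timezone


def most_viewed_aids(days=30, limit=10):
    since = timezone.now() - timedelta(days=days)
    return (Event.objects
            .filter(category='aid', event='viewed', date_created__gte=since)
            .values('meta')                  # meta holds the aid slug
            .annotate(views=Count('id'))
            .order_by('-views')[:limit])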
def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
    if not dry_run:
        file_name = kwargs.get('file_name', 'unknown file name')
        success_message = '{} aids total, {} aids created, {} aids updated'.format(
            result.total_rows, result.totals['new'], result.totals['update'])
        log_event('aid', 'import_xlsx_csv', meta=success_message,
                  source=file_name, value=result.total_rows)  # noqa
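# The hook above matches django-import-export's Resource API (the
# five-argument signature is the pre-3.x one). Wiring it into the
# Django admin looks roughly like this sketch:
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin


class AidResource(resources.ModelResource):
    class Meta:
        model = Aid

    # after_import(...) as defined above lives on this class.


@admin.register(Aid)
class AidAdmin(ImportExportModelAdmin):
    resource_class = AidResource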
def handle(self, *args, **options):
    self.populate_cache(*args, **options)
    data = self.fetch_data(**options)
    aids_and_related_objects = []
    for line in data:
        if self.line_should_be_processed(line):
            aids_and_related_objects.append(self.process_line(line))

    # Let's try to actually save the imported aid.
    #
    # For each aid, we have two cases:
    #  1) The aid is actually new, so we just create it.
    #  2) The aid is known from a previous import; in that case, we
    #     only update a few fields and don't overwrite manual
    #     modifications that could have been made on our side.
    #
    # For the moment, since the data is not huge (there are probably a
    # few dozen aids per provider at best), I decided to focus on code
    # readability rather than on optimizing the number of db queries.
    created_counter = 0
    updated_counter = 0
    with transaction.atomic():
        for aid, financers, instructors, categories, programs in aids_and_related_objects:  # noqa
            try:
                with transaction.atomic():
                    aid.set_search_vector_unaccented(financers, instructors)
                    aid.save()
                    aid.financers.set(financers)
                    aid.instructors.set(instructors)
                    aid.categories.set(categories)
                    aid.programs.set(programs)
                    created_counter += 1
                    self.stdout.write(self.style.SUCCESS(
                        'New aid: {}'.format(aid.name)))
            except IntegrityError as e:
                self.stdout.write(self.style.ERROR(str(e)))
                try:
                    Aid.objects \
                        .filter(import_uniqueid=aid.import_uniqueid) \
                        .update(
                            origin_url=aid.origin_url,
                            start_date=aid.start_date,
                            submission_deadline=aid.submission_deadline,
                            import_raw_object=aid.import_raw_object,
                            date_updated=timezone.now(),
                            import_last_access=timezone.now())
                    updated_counter += 1
                    self.stdout.write(self.style.SUCCESS(
                        'Updated aid: {}'.format(aid.name)))
                except Exception as e:
                    self.stdout.write(self.style.ERROR(
                        'Cannot update aid {}: {}'.format(aid.name, e)))
            except Exception as e:
                self.stdout.write(self.style.ERROR(
                    'Cannot import aid {}: {}'.format(aid.name, e)))

    success_message = '{} aids total, {} aids created, {} aids updated'.format(
        len(aids_and_related_objects), created_counter, updated_counter)
    self.stdout.write(self.style.SUCCESS(success_message))

    # Log the results (works only for DataSource imports).
    try:
        data_source_name = aids_and_related_objects[0][0].import_data_source.name
        log_event('aid', 'import_api', meta=success_message,
                  source=data_source_name,
                  value=len(aids_and_related_objects))  # noqa
    except Exception:
        pass
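# The create-then-update-on-IntegrityError flow above is effectively an
# upsert: a uniqueness constraint on `import_uniqueid` decides which
# branch runs, and the nested `transaction.atomic()` lets the
# IntegrityError be caught without aborting the outer transaction. A
# sketch of the field this relies on -- an assumed shape, not the
# actual Aid model definition:
from django.db import models


class Aid(models.Model):
    # ... other fields elided ...
    import_uniqueid = models.CharField(
        max_length=200, unique=True, null=True, blank=True)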