def _get_csv_data(self):
    """Generate raw CSV data of page views for the current project.

    Returns a CSV file response (via ``get_csv_file``) covering the
    project's data-retention window; when analytics are disabled for the
    project, the file contains only the header row.
    """
    project = self.get_project()
    now = timezone.now().date()
    retention_limit = self._get_retention_days_limit(project)
    if retention_limit in (None, -1):
        # Unlimited retention: include data since the project was created.
        days_ago = project.pub_date.date()
    else:
        days_ago = now - timezone.timedelta(days=retention_limit)

    # (CSV header, queryset field) pairs — header row and values_list
    # arguments are derived from the same list so they stay in sync.
    values = [
        ('Date', 'date'),
        ('Version', 'version__slug'),
        ('Path', 'path'),
        ('Views', 'view_count'),
    ]
    data = []
    if self._is_enabled(project):
        data = (
            PageView.objects.filter(
                project=project,
                date__gte=days_ago,
            )
            .order_by('-date')
            .values_list(*[field for _, field in values])
        )

    filename = 'readthedocs_traffic_analytics_{project_slug}_{start}_{end}.csv'.format(
        project_slug=project.slug,
        start=days_ago.strftime('%Y-%m-%d'),
        end=now.strftime('%Y-%m-%d'),
    )
    # Format the date column explicitly; pass the remaining columns through.
    csv_data = [
        [date.strftime('%Y-%m-%d %H:%M:%S'), *rest]
        for date, *rest in data
    ]
    csv_data.insert(0, [header for header, _ in values])
    return get_csv_file(filename=filename, csv_data=csv_data)
def _get_csv_data(self):
    """Generate raw CSV data of security/audit logs for the current organization.

    The reported date range defaults to the view's start date through
    today, narrowed by the user-selected date filter when one is set.
    Returns a CSV file response via ``get_csv_file``.
    """
    organization = self.get_organization()
    current_timezone = settings.TIME_ZONE
    # (CSV header, queryset field) pairs — header row and values_list
    # arguments are derived from the same list so they stay in sync.
    values = [
        (f'Date ({current_timezone})', 'created'),
        ('User', 'log_user_username'),
        ('Project', 'log_project_slug'),
        ('Organization', 'log_organization_slug'),
        ('Action', 'action'),
        ('Resource', 'resource'),
        ('IP', 'ip'),
        ('Browser', 'browser'),
    ]
    data = self.get_queryset().values_list(*[field for _, field in values])

    start_date = self._get_start_date()
    end_date = timezone.now().date()
    date_filter = self.filter.form.cleaned_data.get('date')
    if date_filter:
        # Narrow the range shown in the filename to the user's filter;
        # the queryset itself is already filtered by self.filter.
        start_date = date_filter.start or start_date
        end_date = date_filter.stop or end_date

    filename = 'readthedocs_organization_security_logs_{organization}_{start}_{end}.csv'.format(
        organization=organization.slug,
        start=start_date.strftime('%Y-%m-%d'),
        end=end_date.strftime('%Y-%m-%d'),
    )
    # Format the date column explicitly; pass the remaining columns through.
    csv_data = [
        [date.strftime('%Y-%m-%d %H:%M:%S'), *rest]
        for date, *rest in data
    ]
    csv_data.insert(0, [header for header, _ in values])
    return get_csv_file(filename=filename, csv_data=csv_data)
def _get_csv_data(self):
    """Generate raw CSV data of search queries for the current project.

    Returns a CSV file response (via ``get_csv_file``) covering the
    project's data-retention window; when search analytics are disabled
    for the project, the file contains only the header row.
    """
    project = self.get_project()
    now = timezone.now().date()
    retention_limit = self._get_retention_days_limit(project)
    if retention_limit in (None, -1):
        # Unlimited retention: include data since the project was created.
        days_ago = project.pub_date.date()
    else:
        days_ago = now - timezone.timedelta(days=retention_limit)

    # (CSV header, queryset field) pairs — header row and values_list
    # arguments are derived from the same list so they stay in sync.
    values = [
        ('Created Date', 'created'),
        ('Query', 'query'),
        ('Total Results', 'total_results'),
    ]
    data = []
    if self._is_enabled(project):
        data = (
            SearchQuery.objects.filter(
                project=project,
                created__date__gte=days_ago,
            )
            .order_by('-created')
            .values_list(*[field for _, field in values])
        )

    filename = 'readthedocs_search_analytics_{project_slug}_{start}_{end}.csv'.format(
        project_slug=project.slug,
        start=days_ago.strftime('%Y-%m-%d'),
        end=now.strftime('%Y-%m-%d'),
    )
    # Format the date column explicitly; pass the remaining columns through.
    csv_data = [
        [date.strftime('%Y-%m-%d %H:%M:%S'), *rest]
        for date, *rest in data
    ]
    csv_data.insert(0, [header for header, _ in values])
    return get_csv_file(filename=filename, csv_data=csv_data)
def _get_csv_data(self):
    """Generate raw CSV data of security logs for the current user.

    The filename advertises the range from ``self.days_limit`` days ago
    through now; the queryset from ``self._get_queryset()`` supplies the
    rows. Returns a CSV file response via ``get_csv_file``.
    """
    # (CSV header, queryset field) pairs — header row and values_list
    # arguments are derived from the same list so they stay in sync.
    values = [
        ('Date', 'created'),
        ('User', 'log_user_username'),
        ('Project', 'log_project_slug'),
        ('Organization', 'log_organization_slug'),
        ('Action', 'action'),
        ('IP', 'ip'),
        ('Browser', 'browser'),
    ]
    data = self._get_queryset().values_list(*[field for _, field in values])

    now = timezone.now()
    days_ago = now - timedelta(days=self.days_limit)
    filename = 'readthedocs_user_security_logs_{username}_{start}_{end}.csv'.format(
        username=self.request.user.username,
        start=days_ago.strftime('%Y-%m-%d'),
        end=now.strftime('%Y-%m-%d'),
    )
    # Format the date column explicitly; pass the remaining columns through.
    csv_data = [
        [date.strftime('%Y-%m-%d %H:%M:%S'), *rest]
        for date, *rest in data
    ]
    csv_data.insert(0, [header for header, _ in values])
    return get_csv_file(filename=filename, csv_data=csv_data)