Example #1
    def read(self, query: str, limit: int = None) -> Dataset:
        """Run a read (SELECT) query against the database.

        The result is returned as a Dataset: the headers describe each
        column as 'name(type)', derived from the cursor description,
        and the remaining rows are the fetched records.

        :param query: the SQL query
        :param limit: the maximum number of records to return
        :return: a Dataset with headers and records
        """
        data = Dataset()
        cursor = self.connection.cursor()
        query = self.add_row_limit_in_query(query, limit)
        cursor.execute(query)
        result = cursor.fetchall()

        if cursor.description:
            data.headers = [
                '{0}({1})'.format(d[0], self.get_data_type(d[1]))
                for d in cursor.description
            ]
        data.extend(result)
        cursor.close()

        return data
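The helpers add_row_limit_in_query and get_data_type are referenced above but not shown; a minimal sketch of how they might behave (the names come from the call sites, the bodies are assumptions, not the original implementation):

    def add_row_limit_in_query(self, query, limit):
        # Assumed helper: append a LIMIT clause only when a limit is
        # requested and the query does not already contain one.
        if limit is not None and 'limit' not in query.lower():
            query = '{0} LIMIT {1}'.format(query.rstrip(';'), int(limit))
        return query

    def get_data_type(self, type_code):
        # Assumed helper: turn a DB-API type code into a readable name,
        # falling back to the raw code when no name is available.
        return getattr(type_code, '__name__', str(type_code))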
Example #2
    def test_chunks(self):
        data = Dataset(
            ('Matteo', 'Guadrini', 35),
            ('Arthur', 'Dent', 42),
            ('Ford', 'Prefect', 42),
        )
        data.extend([
            ('Matteo', 'Guadrini', 35),
            ('Arthur', 'Dent', 42),
            ('Ford', 'Prefect', 42),
        ])
        data.headers = ['name', 'surname', 'age']
        self.assertEqual(
            list(pyreports.chunks(data, 4))[0][0],
            ('Matteo', 'Guadrini', 35),
        )
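pyreports.chunks splits a Dataset into successive blocks of rows, which is what the assertion above relies on; a minimal sketch consistent with that behaviour (the actual pyreports implementation may differ):

def chunks(data, length):
    # Yield successive slices of `length` rows; slicing a tablib
    # Dataset returns a list of row tuples.
    for i in range(0, len(data), length):
        yield data[i:i + length]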
Example #3
def to_dataset(observations: AnyObservations) -> Dataset:
    """Convert observations to a generic tabular dataset. This can be converted to any of the
    `formats supported by tablib <https://tablib.readthedocs.io/en/stable/formats>`_.
    """
    if isinstance(observations, Dataset):
        return observations

    flat_observations = flatten_observations(observations, flatten_lists=True)
    dataset = Dataset()
    headers, flat_observations = _fix_dimensions(flat_observations)
    dataset.headers = headers
    dataset.extend([item.values() for item in flat_observations])
    return dataset
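A short usage sketch, assuming `observations` is any value accepted by AnyObservations; once converted, the Dataset can be serialized to any tablib-supported format:

dataset = to_dataset(observations)
print(dataset.export('csv'))  # or 'json', 'yaml', ...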
Example #4
    def export_view(self, request):
        dataset = Dataset(headers=self.export_headers)
        filename = timezone.now().date().strftime(self.export_filename)

        for r in self.get_queryset(request):
            rows = []
            for site in r.sites.all():
                rows.append([
                    site.domain,
                    r.get_full_inbound_route(),
                    r.outbound_route,
                ])
            dataset.extend(rows)

        response = HttpResponse(dataset.csv, content_type='text/csv; charset=utf-8')
        response['Content-Disposition'] = 'attachment; filename="{0}"'.format(filename)
        return response
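The Dataset-to-CSV step above is independent of the Django models; a standalone sketch with illustrative header names and rows:

from tablib import Dataset

dataset = Dataset(headers=['domain', 'inbound route', 'outbound route'])
dataset.extend([
    ('example.com', '/old/', '/new/'),
    ('example.org', '/foo/', '/bar/'),
])
csv_body = dataset.csv  # same property used as the HttpResponse body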
Example #5
def merge(*datasets):
    """
    Merge two or more Dataset objects into a single one.

    :param datasets: collection of Dataset objects
    :return: Dataset
    """
    if len(datasets) >= 2:
        new_data = Dataset()
        # All datasets must have rows of the same length
        length_row = len(datasets[0][0])
        for data in datasets:
            if length_row != len(data[0]):
                raise InvalidDimensions('the rows are not the same length')
            new_data.extend(data)
        return new_data
    else:
        raise ReportDataError('you can only merge two or more Dataset objects')
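A usage sketch, assuming two Dataset objects whose rows have the same length:

first = Dataset(('Matteo', 'Guadrini', 35))
second = Dataset(('Arthur', 'Dent', 42))
merged = merge(first, second)  # one Dataset containing the rows of both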
Example #6
    def export_view(self, request):
        dataset = Dataset(headers=self.export_headers)
        filename = timezone.now().date().strftime(self.export_filename)
        redirects = self.get_queryset(request).prefetch_related('translations')

        for r in redirects:
            rows = []
            for translation in r.translations.all():
                rows.append([
                    r.site.domain,
                    r.old_path,
                    translation.new_path,
                    translation.language_code,
                ])
            dataset.extend(rows)

        response = HttpResponse(dataset.csv, content_type='text/csv; charset=utf-8')
        response['Content-Disposition'] = 'attachment; filename="{0}"'.format(filename)
        return response
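Note that prefetch_related('translations') loads the related translations in a single extra query, so the inner loop over r.translations.all() does not hit the database once per redirect.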