Example #1
def export_data_source(request, domain, config_id):
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
    except UserQueryError as e:
        return HttpResponse(e.message, status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # build export
    def get_table(q):
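        # yield the header row (column names) first, then every result row from the query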
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            msg = _("There was a problem executing your query, please make "
                    "sure your parameters are valid.")
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
Example #2
 def email_response(self):
     with closing(StringIO()) as temp:
         export_from_tables(self.export_table, temp, Format.HTML)
         return HttpResponse(json.dumps({
             'report': temp.getvalue(),
         }),
                             content_type='application/json')
Example #3
    def test_export_raw(self):
        headers = (('people', ('name', 'gender')), ('offices', ('location',
                                                                'name')))
        data = (('people', [('danny', 'male'), ('amelia', 'female'),
                            ('carter', 'various')]),
                ('offices', [('Delhi, India', 'DSI'),
                             ('Boston, USA', 'Dimagi, Inc'),
                             ('Capetown, South Africa', 'DSA')]))
        EXPECTED = {
            "offices": {
                "headers": ["location", "name"],
                "rows": [["Delhi, India", "DSI"],
                         ["Boston, USA", "Dimagi, Inc"],
                         ["Capetown, South Africa", "DSA"]]
            },
            "people": {
                "headers": ["name", "gender"],
                "rows": [["danny", "male"], ["amelia", "female"],
                         ["carter", "various"]]
            }
        }

        that = self

        class Tester(object):
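            # Collects the exported bytes in a BytesIO buffer; on clean exit,
            # asserts that the decoded JSON matches EXPECTED.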
            def __enter__(self):
                self.buffer = BytesIO()
                return self.buffer

            def __exit__(self, exc_type, exc_val, exc_tb):
                if exc_type is None:
                    that.assertDictEqual(json.loads(self.buffer.getvalue()),
                                         EXPECTED)
                self.buffer.close()

        with Tester() as buffer:
            export_raw(headers, data, buffer, format=Format.JSON)

        with Tester() as buffer:
            # test lists
            export_raw(list(headers), list(data), buffer, format=Format.JSON)

        with Tester() as buffer:
            # test generators
            export_raw((h for h in headers),
                       ((name, (r for r in rows)) for name, rows in data),
                       buffer,
                       format=Format.JSON)

        with Tester() as buffer:
            # test export_from_tables
            headers = dict(headers)
            data = dict(data)
            tables = {}
            for key in set(headers.keys()) | set(data.keys()):
                tables[key] = itertools.chain([headers[key]], data[key])

            export_from_tables(list(tables.items()),
                               buffer,
                               format=Format.JSON)
Example #4
def collect_inactive_awws():
    celery_task_logger.info("Started updating the Inactive AWW")
    filename = "inactive_awws_%s.csv" % date.today().strftime('%Y-%m-%d')
    last_sync = IcdsFile.objects.filter(
        data_type='inactive_awws').order_by('-file_added').first()

    # If there is no previous sync, collect data from the initial start date
    if not last_sync:
        last_sync_date = datetime(2017, 3, 1).date()
    else:
        last_sync_date = last_sync.file_added

    _aggregate_inactive_aww(last_sync_date)

    celery_task_logger.info("Collecting inactive AWW to generate zip file")
    excel_data = AggregateInactiveAWW.objects.all()

    celery_task_logger.info("Preparing data to csv file")
    columns = [x.name for x in AggregateInactiveAWW._meta.fields
               ] + ['days_since_start', 'days_inactive']
    rows = [columns]
    for data in excel_data:
        rows.append([_get_value(data, field) for field in columns])

    celery_task_logger.info("Creating csv file")
    export_file = BytesIO()
    export_from_tables([['inactive AWWSs', rows]], export_file, 'csv')

    celery_task_logger.info("Saving csv file in blobdb")
    sync = IcdsFile(blob_id=filename, data_type='inactive_awws')
    sync.store_file_in_blobdb(export_file)
    sync.save()
    celery_task_logger.info("Ended updating the Inactive AWW")
Example #5
 def email_response(self):
     fd, path = tempfile.mkstemp()
     with os.fdopen(fd, 'wb') as temp:
         export_from_tables(self.export_table, temp, Format.HTML)
     with open(path) as f:
         return HttpResponse(json.dumps({
             'report': f.read(),
         }))
Example #6
 def export_response(self):
     """
         Intention: Not to be overridden in general.
         Returns the tabular export of the data, if available.
     """
     temp = StringIO()
     export_from_tables(self.export_table, temp, self.export_format)
     return export_response(temp, self.export_format, self.export_name)
Example #7
def create_excel_file(excel_data, data_type, file_format):
    file_hash = uuid.uuid4().hex
    export_file = BytesIO()
    icds_file = IcdsFile(blob_id=file_hash, data_type=data_type)
    export_from_tables(excel_data, export_file, file_format)
    export_file.seek(0)
    icds_file.store_file_in_blobdb(export_file, expired=60 * 60 * 24)
    icds_file.save()
    return file_hash
Example #8
def create_excel_file(excel_data, data_type, file_format, blob_key=None, timeout=ONE_DAY):
    key = blob_key or uuid.uuid4().hex
    export_file = BytesIO()
    icds_file, _ = IcdsFile.objects.get_or_create(blob_id=key, data_type=data_type)
    export_from_tables(excel_data, export_file, file_format)
    export_file.seek(0)
    icds_file.store_file_in_blobdb(export_file, expired=timeout)
    icds_file.save()
    return key
Example #9
def export_ucr_async(export_table, download_id, title, user):
    use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
    filename = u'{}.xlsx'.format(title.replace(u'/', u'?'))
    file_path = get_download_file_path(use_transfer, filename)
    export_from_tables(export_table, file_path, Format.XLS_2007)
    expose_download(use_transfer, file_path, filename, download_id, 'xlsx')
    link = reverse("retrieve_download", args=[download_id], params={"get_file": '1'}, absolute=True)

    send_report_download_email(title, user, link)
Example #10
def export_data_source(request, domain, config_id):
    config, _ = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
        allowed_formats = [
            Format.CSV,
            Format.HTML,
            Format.XLS,
            Format.XLS_2007,
        ]
        if params.format not in allowed_formats:
            msg = ugettext_lazy('format must be one of the following: {}').format(', '.join(allowed_formats))
            return HttpResponse(msg, status=400)
    except UserQueryError as e:
        return HttpResponse(e.message, status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # The xls format has a limit of 65536 rows;
    # the first row is taken up by headers.
    if params.format == Format.XLS and q.count() >= 65535:
        keyword_params = dict(**request.GET)
        # use default format
        if 'format' in keyword_params:
            del keyword_params['format']
        return HttpResponseRedirect(
            '%s?%s' % (
                reverse('export_configurable_data_source', args=[domain, config._id]),
                urlencode(keyword_params)
            )
        )

    # build export
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            msg = ugettext_lazy(
                "There was a problem executing your query, "
                "please make sure your parameters are valid."
            )
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
Example #11
    def export_response(self):
        if self.export_too_large:
            # The frontend should check the size with export_size_check_response()
            # before hitting this endpoint, but we check the size again here
            # in case the user modifies the url manually.
            return HttpResponseBadRequest()

        temp = StringIO()
        export_from_tables(self.export_table, temp, Format.XLS_2007)
        return export_response(temp, Format.XLS_2007, self.title)
Example #12
 def test_bytestrings(self):
     format_ = Format.XLS_2007
     file_ = io.BytesIO()
     table = [
         [b'heading\xe2\x80\x931', b'heading\xe2\x80\x932', b'heading\xe2\x80\x933'],
         [b'row1\xe2\x80\x931', b'row1\xe2\x80\x932', b'row1\xe2\x80\x933'],
         [b'row2\xe2\x80\x931', b'row2\xe2\x80\x932', b'row2\xe2\x80\x933'],
     ]
     tables = [[b'table\xe2\x80\x93title', table]]
     export_from_tables(tables, file_, format_)
Example #13
def export_data_source(request, domain, config_id):
    config, _ = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
        allowed_formats = [
            Format.CSV,
            Format.HTML,
            Format.XLS,
            Format.XLS_2007,
        ]
        if params.format not in allowed_formats:
            msg = ugettext_lazy(
                'format must be one of the following: {}').format(
                    ', '.join(allowed_formats))
            return HttpResponse(msg, status=400)
    except UserQueryError as e:
        return HttpResponse(e.message, status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # The xls format has a limit of 65536 rows;
    # the first row is taken up by headers.
    if params.format == Format.XLS and q.count() >= 65535:
        keyword_params = dict(**request.GET)
        # use default format
        if 'format' in keyword_params:
            del keyword_params['format']
        return HttpResponseRedirect(
            '%s?%s' %
            (reverse('export_configurable_data_source',
                     args=[domain, config._id]), urlencode(keyword_params)))

    # build export
    def get_table(q):
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            msg = ugettext_lazy("There was a problem executing your query, "
                                "please make sure your parameters are valid.")
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
Example #14
 def export_response(self):
     """
     Intention: Not to be overridden in general.
     Returns the tabular export of the data, if available.
     """
     if self.exportable_all:
         export_all_rows_task.delay(self.__class__, self.__getstate__())
         return HttpResponse()
     else:
         temp = StringIO()
         export_from_tables(self.export_table, temp, self.export_format)
         return export_response(temp, self.export_format, self.export_name)
Example #15
 def export_response(self):
     """
     Intention: Not to be overridden in general.
     Returns the tabular export of the data, if available.
     """
     if self.exportable_all:
         export_all_rows_task.delay(self.__class__, self.__getstate__())
         return HttpResponse()
     else:
         temp = io.BytesIO()
         export_from_tables(self.export_table, temp, self.export_format)
         return export_response(temp, self.export_format, self.export_name)
Example #16
    def test_data_length(self):
        format_ = Format.XLS
        file_ = io.BytesIO()
        table = [
            ['header{}'.format(i) for i in range(MAX_XLS_COLUMNS + 1)],
            ['row{}'.format(i) for i in range(MAX_XLS_COLUMNS + 1)],
        ]
        tables = [['title', table]]

        with self.assertRaises(XlsLengthException):
            export_from_tables(tables, file_, format_)

        table = [
            ['header{}'.format(i) for i in range(MAX_XLS_COLUMNS)],
            ['row{}'.format(i) for i in range(MAX_XLS_COLUMNS)],
        ]
        tables = [['title', table]]
        export_from_tables(tables, file_, format_)
Example #17
    def test_nones_transformed(self):
        headers = ('Breakfast', 'Breakfast', 'Amuse-Bouche', 'Breakfast')
        row = ('spam', 'spam', None, 'spam')
        table = (headers, row, row, row)
        export_tables = (('Spam', table), )

        with closing(io.BytesIO()) as file_:
            export_from_tables(export_tables, file_, Format.HTML)
            html_string = file_.getvalue()

        root = html.fromstring(html_string)
        html_rows = [[etree.tostring(td).strip() for td in tr.xpath('./td')]
                     for tr in root.xpath('./body/table/tbody/tr')]
        self.assertEqual(
            html_rows,
            [['<td>spam</td>', '<td>spam</td>', '<td/>', '<td>spam</td>'],
             ['<td>spam</td>', '<td>spam</td>', '<td/>', '<td>spam</td>'],
             ['<td>spam</td>', '<td>spam</td>', '<td/>', '<td>spam</td>']])
Example #18
    def test_nones_transformed(self):
        headers = ('Breakfast', 'Breakfast', 'Amuse-Bouche', 'Breakfast')
        row = ('spam', 'spam', None, 'spam')
        table = (headers, row, row, row)
        export_tables = (('Spam', table),)

        with closing(io.BytesIO()) as file_:
            export_from_tables(export_tables, file_, Format.HTML)
            html_string = file_.getvalue()

        root = html.fromstring(html_string)
        html_rows = [
            [etree.tostring(td).strip().decode('utf-8') for td in tr.xpath('./td')]
            for tr in root.xpath('./body/table/tbody/tr')
        ]
        self.assertEqual(html_rows,
                         [['<td>spam</td>', '<td>spam</td>', '<td/>', '<td>spam</td>'],
                          ['<td>spam</td>', '<td>spam</td>', '<td/>', '<td>spam</td>'],
                          ['<td>spam</td>', '<td>spam</td>', '<td/>', '<td>spam</td>']])
Example #19
    def send_report(self, recipient):
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        yesterday_string = yesterday.strftime("%d %b %Y")
        table = self._generate_report_table()

        file_to_attach = io.BytesIO()
        export_from_tables([[yesterday_string, table]], file_to_attach,
                           Format.XLS_2007)

        email_context = {
            'date_of_report': yesterday_string,
        }
        email_content = render_to_string('accounting/email/credits_on_hq.html',
                                         email_context)
        email_content_plaintext = render_to_string(
            'accounting/email/credits_on_hq.txt', email_context)
        format_dict = Format.FORMAT_DICT[Format.XLS_2007]

        file_attachment = {
            'title':
            'Credits_on_hq_{}_{}'.format(
                yesterday.isoformat(),
                settings.SERVER_ENVIRONMENT,
            ),
            'mimetype':
            format_dict['mimetype'],
            'file_obj':
            file_to_attach,
        }

        from_email = "Dimagi Finance <{}>".format(settings.DEFAULT_FROM_EMAIL)
        send_HTML_email(
            "{} Credits on HQ {}".format(
                yesterday_string,
                settings.SERVER_ENVIRONMENT,
            ),
            recipient,
            email_content,
            email_from=from_email,
            text_content=email_content_plaintext,
            file_attachments=[file_attachment],
        )
Example #20
def export_case_transactions(request, domain, case_id):
    case = get_document_or_404(CommCareCase, domain, case_id)
    products_by_id = dict(SQLProduct.objects.filter(domain=domain).values_list('product_id', 'name'))

    headers = [
        _('case id'),
        _('case name'),
        _('section'),
        _('date'),
        _('product_id'),
        _('product_name'),
        _('transaction amount'),
        _('type'),
        _('ending balance'),
    ]

    def _make_row(transaction):
        return [
            transaction.case_id,
            case.name,
            transaction.section_id,
            transaction.report.date if transaction.report_id else '',
            transaction.product_id,
            products_by_id.get(transaction.product_id, _('unknown product')),
            transaction.quantity,
            transaction.type,
            transaction.stock_on_hand,
        ]

    query_set = StockTransaction.objects.select_related('report')\
        .filter(case_id=case_id).order_by('section_id', 'report__date')

    formatted_table = [
        [
            'stock transactions',
            [headers] + [_make_row(txn) for txn in query_set]
        ]
    ]
    tmp = StringIO()
    export_from_tables(formatted_table, tmp, 'xlsx')
    return export_response(tmp, 'xlsx', '{}-stock-transactions'.format(case.name))
Example #21
    def test_export_raw(self):
        headers = (('people', ('name', 'gender')), ('offices', ('location', 'name')))
        data = (
            ('people', [('danny', 'male'), ('amelia', 'female'), ('carter', 'various')]),
            ('offices', [('Delhi, India', 'DSI'), ('Boston, USA', 'Dimagi, Inc'), ('Capetown, South Africa', 'DSA')])
        )
        EXPECTED = {"offices": {"headers": ["location", "name"], "rows": [["Delhi, India", "DSI"], ["Boston, USA", "Dimagi, Inc"], ["Capetown, South Africa", "DSA"]]}, "people": {"headers": ["name", "gender"], "rows": [["danny", "male"], ["amelia", "female"], ["carter", "various"]]}}

        that = self

        class Tester(object):

            def __enter__(self):
                self.buffer = BytesIO()
                return self.buffer

            def __exit__(self, exc_type, exc_val, exc_tb):
                if exc_type is None:
                    that.assertDictEqual(json.loads(self.buffer.getvalue()), EXPECTED)
                self.buffer.close()

        with Tester() as buffer:
            export_raw(headers, data, buffer, format=Format.JSON)

        with Tester() as buffer:
            # test lists
            export_raw(list(headers), list(data), buffer, format=Format.JSON)

        with Tester() as buffer:
            # test generators
            export_raw((h for h in headers), ((name, (r for r in rows)) for name, rows in data), buffer, format=Format.JSON)
            
        with Tester() as buffer:
            # test export_from_tables
            headers = dict(headers)
            data = dict(data)
            tables = {}
            for key in set(headers.keys()) | set(data.keys()):
                tables[key] = itertools.chain([headers[key]], data[key])

            export_from_tables(list(tables.items()), buffer, format=Format.JSON)
Example #22
def export_data_source(request, domain, config_id):
    format = request.GET.get('format', Format.UNZIPPED_CSV)
    config = get_document_or_404(DataSourceConfiguration, domain, config_id)
    table = get_indicator_table(config)
    q = Session.query(table)
    column_headers = [col['name'] for col in q.column_descriptions]

    # apply filtering if any
    filter_values = {key: value for key, value in request.GET.items() if key != 'format'}
    for key in filter_values:
        if key not in column_headers:
            return HttpResponse('Invalid filter parameter: {}'.format(key), status=400)
    q = q.filter_by(**filter_values)

    # build export
    def get_table(q):
        yield column_headers
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as temp:
        export_from_tables([[config.table_id, get_table(q)]], temp, format)
        return export_response(Temp(path), format, config.display_name)
Example #23
def collect_inactive_awws():
    celery_task_logger.info("Started updating the Inactive AWW")
    filename = "inactive_awws_%s.csv" % date.today().strftime('%Y-%m-%d')
    last_sync = IcdsFile.objects.filter(data_type='inactive_awws').order_by('-file_added').first()

    # If there is no previous sync, collect data from the initial start date
    if not last_sync:
        last_sync_date = datetime(2017, 3, 1).date()
    else:
        last_sync_date = last_sync.file_added

    _aggregate_inactive_aww(last_sync_date)

    celery_task_logger.info("Collecting inactive AWW to generate zip file")
    excel_data = AggregateInactiveAWW.objects.all()

    celery_task_logger.info("Preparing data to csv file")
    columns = [x.name for x in AggregateInactiveAWW._meta.fields] + [
        'days_since_start',
        'days_inactive'
    ]
    rows = [columns]
    for data in excel_data:
        rows.append(
            [_get_value(data, field) for field in columns]
        )

    celery_task_logger.info("Creating csv file")
    export_file = BytesIO()
    export_from_tables([['inactive AWWSs', rows]], export_file, 'csv')

    celery_task_logger.info("Saving csv file in blobdb")
    sync = IcdsFile(blob_id=filename, data_type='inactive_awws')
    sync.store_file_in_blobdb(export_file)
    sync.save()
    celery_task_logger.info("Ended updating the Inactive AWW")
Example #24
 def excel_response(self):
     file = StringIO()
     export_from_tables(self.export_table, file, Format.XLS_2007)
     return file
Example #25
 def format(self, format):
     tables = self._tables.items()
     f = StringIO()
     export_from_tables(tables, f, format)
     return f.getvalue()
Example #26
def create_excel_file(domain, excel_data, data_type, file_format):
    export_file = BytesIO()
    export_from_tables(excel_data, export_file, file_format)
    export_file.seek(0)
    meta = store_file_in_blobdb(domain, export_file)
    return meta.key
Example #27
 def excel_response(self):
     file = io.BytesIO()
     export_from_tables(self.export_table, file, self.export_format)
     return file
Example #28
 def create_export(self, file_path, format_):
     """Save this report to a file
     :param file_path: The path to the file the report should be saved
     :param format_: The format of the resulting export
     """
     return export_from_tables(self.get_table(), file_path, format_)
Example #29
 def excel_response(self):
     file = StringIO()
     export_from_tables(self.export_table, file, self.export_format)
     return file
Example #30
def weekly_digest():
    today = datetime.date.today()
    in_forty_days = today + datetime.timedelta(days=40)

    ending_in_forty_days = filter(
        lambda sub: not sub.is_renewed,
        Subscription.objects.filter(
            date_end__lte=in_forty_days,
            date_end__gte=today,
            is_active=True,
            is_trial=False,
        ))

    if not ending_in_forty_days:
        logger.info(
            "[Billing] Did not send summary of ending subscriptions because "
            "there are none."
        )
        return

    table = [[
        "Project Space", "Account", "Plan", "Salesforce Contract ID",
        "Dimagi Contact", "Start Date", "End Date", "Receives Invoice",
        "Created By",
    ]]

    def _fmt_row(sub):
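        # Build one spreadsheet row per ending subscription, including who created it.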
        try:
            created_by_adj = SubscriptionAdjustment.objects.filter(
                subscription=sub,
                reason=SubscriptionAdjustmentReason.CREATE
            ).order_by('date_created')[0]
            created_by = dict(SubscriptionAdjustmentMethod.CHOICES).get(
                created_by_adj.method, "Unknown")
        except (IndexError, SubscriptionAdjustment.DoesNotExist):
            created_by = "Unknown"
        return [
            sub.subscriber.domain,
            "%s (%s)" % (sub.account.name, sub.account.id),
            sub.plan_version.plan.name,
            sub.salesforce_contract_id,
            sub.account.dimagi_contact,
            sub.date_start,
            sub.date_end,
            "No" if sub.do_not_invoice else "YES",
            created_by,
        ]

    table.extend([_fmt_row(sub) for sub in ending_in_forty_days])

    file_to_attach = StringIO()
    export_from_tables(
        [['End in 40 Days', table]],
        file_to_attach,
        Format.XLS_2007
    )

    email_context = {
        'today': today.isoformat(),
        'forty_days': in_forty_days.isoformat(),
    }
    email_content = render_to_string(
        'accounting/digest_email.html', email_context)
    email_content_plaintext = render_to_string(
        'accounting/digest_email.txt', email_context)

    format_dict = Format.FORMAT_DICT[Format.XLS_2007]
    file_attachment = {
        'title': 'Subscriptions_%(start)s_%(end)s.xls' % {
            'start': today.isoformat(),
            'end': in_forty_days.isoformat(),
        },
        'mimetype': format_dict['mimetype'],
        'file_obj': file_to_attach,
    }
    from_email = "Dimagi Accounting <%s>" % settings.DEFAULT_FROM_EMAIL
    send_HTML_email(
        "Subscriptions ending in 40 Days from %s" % today.isoformat(),
        settings.INVOICING_CONTACT_EMAIL,
        email_content,
        email_from=from_email,
        text_content=email_content_plaintext,
        file_attachments=[file_attachment],
    )

    logger.info(
        "[BILLING] Sent summary of ending subscriptions from %(today)s" % {
            'today': today.isoformat(),
        })
Example #31
    def to_export(self, format, location):
        export_file = BytesIO()
        excel_data = self.get_excel_data(location)

        export_from_tables(excel_data, export_file, format)
        return export_response(export_file, format, self.title)
Example #32
def global_report(request, template="hqadmin/global.html", as_export=False):
    def _flot_format(result):
        return int(datetime(year=result["key"][0], month=result["key"][1], day=1).strftime("%s")) * 1000

    def _export_format(result):
        return datetime(year=result["key"][0], month=result["key"][1], day=1).strftime("%Y-%m-%d")

    context = get_hqadmin_base_context(request)

    def _metric(name):
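        # Collect per-month counts for this metric and store both the raw and
        # cumulative series in the context.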
        counts = []
        for result in get_db().view("hqadmin/%ss_over_time" % name, group_level=2):
            if not result or not result.has_key("key") or not result.has_key("value"):
                continue
            if (
                result["key"][0]
                and int(result["key"][0]) >= 2010
                and (
                    int(result["key"][0]) < datetime.utcnow().year
                    or (
                        int(result["key"][0]) == datetime.utcnow().year
                        and int(result["key"][1]) <= datetime.utcnow().month
                    )
                )
            ):
                counts.append([_export_format(result) if as_export else _flot_format(result), result["value"]])
        context["%s_counts" % name] = counts
        counts_int = deepcopy(counts)
        for i in range(1, len(counts_int)):
            if isinstance(counts_int[i][1], int):
                counts_int[i][1] += counts_int[i - 1][1]
        context["%s_counts_int" % name] = counts_int

    standard_metrics = ["case", "form", "user"]
    for m in standard_metrics:
        _metric(m)

    def _active_metric(name):
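        # Track the distinct active domains/users seen each month and store
        # per-month and cumulative distinct counts in the context.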
        dates = {}
        for result in get_db().view("hqadmin/%ss_over_time" % name, group=True):
            if not result or not result.has_key("key") or not result.has_key("value"):
                continue
            if (
                result["key"][0]
                and int(result["key"][0]) >= 2010
                and (
                    int(result["key"][0]) < datetime.utcnow().year
                    or (
                        int(result["key"][0]) == datetime.utcnow().year
                        and int(result["key"][1]) <= datetime.utcnow().month
                    )
                )
            ):
                date = _export_format(result) if as_export else _flot_format(result)
                if not date in dates:
                    dates[date] = set([result["key"][2]])
                else:
                    dates[date].update([result["key"][2]])
        datelist = [[date, dates[date]] for date in sorted(dates.keys())]
        domainlist = [[x[0], len(x[1])] for x in datelist]
        domainlist_int = deepcopy(datelist)
        for i in range(1, len(domainlist_int)):
            domainlist_int[i][1] = list(set(domainlist_int[i - 1][1]).union(domainlist_int[i][1]))
        domainlist_int = [[x[0], len(x[1])] for x in domainlist_int]
        context["%s_counts" % name] = domainlist
        context["%s_counts_int" % name] = domainlist_int

    active_metrics = ["active_domain", "active_user"]
    for a in active_metrics:
        _active_metric(a)

    if as_export:
        all_metrics = standard_metrics + active_metrics
        format = request.GET.get("format", "xls")
        tables = []
        for metric_name in all_metrics:
            table = context.get("%s_counts" % metric_name, [])
            table = [["%s" % item[0], "%d" % item[1]] for item in table]
            table.reverse()
            table.append(["date", "%s count" % metric_name])
            table.reverse()

            table_int = context.get("%s_counts_int" % metric_name, [])
            table_int = [["%s" % item[0], "%d" % item[1]] for item in table_int]
            table_int.reverse()
            table_int.append(["date", "%s count, cumulative" % metric_name])
            table_int.reverse()

            tables.append(["%s counts" % metric_name, table])
            tables.append(["%s cumulative" % metric_name, table_int])
        temp = StringIO()
        export_from_tables(tables, temp, format)
        return export_response(temp, format, "GlobalReport")

    context["hide_filters"] = True

    return render(request, template, context)
Example #33
 def _export_response_direct(self):
     temp = io.BytesIO()
     export_from_tables(self.export_table, temp, self.export_format)
     return export_response(temp, self.export_format, self.export_name)
Example #34
def global_report(request, template="hqadmin/global.html", as_export=False):
    def _flot_format(result):
        return int(
            datetime(year=result['key'][0], month=result['key'][1],
                     day=1).strftime("%s")) * 1000

    def _export_format(result):
        return datetime(year=result['key'][0], month=result['key'][1],
                        day=1).strftime("%Y-%m-%d")

    context = get_hqadmin_base_context(request)

    def _metric(name):
        counts = []
        for result in get_db().view("hqadmin/%ss_over_time" % name,
                                    group_level=2):
            if not result or not result.has_key('key') or not result.has_key(
                    'value'):
                continue
            if result['key'][0] and int(result['key'][0]) >= 2010 and \
               (int(result['key'][0]) < datetime.utcnow().year or
                (int(result['key'][0]) == datetime.utcnow().year and
                 int(result['key'][1]) <= datetime.utcnow().month)):
                counts.append([
                    _export_format(result)
                    if as_export else _flot_format(result), result['value']
                ])
        context['%s_counts' % name] = counts
        counts_int = deepcopy(counts)
        for i in range(1, len(counts_int)):
            if isinstance(counts_int[i][1], int):
                counts_int[i][1] += counts_int[i - 1][1]
        context['%s_counts_int' % name] = counts_int

    standard_metrics = ["case", "form", "user"]
    for m in standard_metrics:
        _metric(m)

    def _active_metric(name):
        dates = {}
        for result in get_db().view("hqadmin/%ss_over_time" % name,
                                    group=True):
            if not result or not result.has_key('key') or not result.has_key(
                    'value'):
                continue
            if result['key'][0] and int(result['key'][0]) >= 2010 and\
               (int(result['key'][0]) < datetime.utcnow().year or
                (int(result['key'][0]) == datetime.utcnow().year and
                 int(result['key'][1]) <= datetime.utcnow().month)):
                date = _export_format(result) if as_export else _flot_format(
                    result)
                if not date in dates:
                    dates[date] = set([result['key'][2]])
                else:
                    dates[date].update([result['key'][2]])
        datelist = [[date, dates[date]] for date in sorted(dates.keys())]
        domainlist = [[x[0], len(x[1])] for x in datelist]
        domainlist_int = deepcopy(datelist)
        for i in range(1, len(domainlist_int)):
            domainlist_int[i][1] = list(
                set(domainlist_int[i - 1][1]).union(domainlist_int[i][1]))
        domainlist_int = [[x[0], len(x[1])] for x in domainlist_int]
        context['%s_counts' % name] = domainlist
        context['%s_counts_int' % name] = domainlist_int

    active_metrics = ["active_domain", "active_user"]
    for a in active_metrics:
        _active_metric(a)

    if as_export:
        all_metrics = standard_metrics + active_metrics
        format = request.GET.get("format", "xls")
        tables = []
        for metric_name in all_metrics:
            table = context.get('%s_counts' % metric_name, [])
            table = [["%s" % item[0], "%d" % item[1]] for item in table]
            table.reverse()
            table.append(["date", "%s count" % metric_name])
            table.reverse()

            table_int = context.get('%s_counts_int' % metric_name, [])
            table_int = [["%s" % item[0], "%d" % item[1]]
                         for item in table_int]
            table_int.reverse()
            table_int.append(["date", "%s count, cumulative" % metric_name])
            table_int.reverse()

            tables.append(["%s counts" % metric_name, table])
            tables.append(["%s cumulative" % metric_name, table_int])
        temp = StringIO()
        export_from_tables(tables, temp, format)
        return export_response(temp, format, "GlobalReport")

    context['hide_filters'] = True

    return render(request, template, context)
Example #35
def weekly_digest():
    today = datetime.date.today()
    in_forty_days = today + datetime.timedelta(days=40)

    ending_in_forty_days = filter(
        lambda sub: not sub.is_renewed,
        Subscription.objects.filter(
            date_end__lte=in_forty_days,
            date_end__gte=today,
            is_active=True,
            is_trial=False,
            account__dimagi_contact__isnull=True,
        ))

    if not ending_in_forty_days:
        log_accounting_info(
            "Did not send summary of ending subscriptions because "
            "there are none.")
        return

    table = [[
        "Project Space",
        "Account",
        "Plan",
        "Salesforce Contract ID",
        "Dimagi Contact",
        "Start Date",
        "End Date",
        "Receives Invoice",
        "Created By",
    ]]

    def _fmt_row(sub):
        try:
            created_by_adj = SubscriptionAdjustment.objects.filter(
                subscription=sub,
                reason=SubscriptionAdjustmentReason.CREATE).order_by(
                    'date_created')[0]
            created_by = dict(SubscriptionAdjustmentMethod.CHOICES).get(
                created_by_adj.method, "Unknown")
        except (IndexError, SubscriptionAdjustment.DoesNotExist):
            created_by = "Unknown"
        return [
            sub.subscriber.domain,
            "%s (%s)" % (sub.account.name, sub.account.id),
            sub.plan_version.plan.name,
            sub.salesforce_contract_id,
            sub.account.dimagi_contact,
            sub.date_start,
            sub.date_end,
            "No" if sub.do_not_invoice else "YES",
            created_by,
        ]

    table.extend([_fmt_row(sub) for sub in ending_in_forty_days])

    file_to_attach = StringIO()
    export_from_tables([['End in 40 Days', table]], file_to_attach,
                       Format.XLS_2007)

    email_context = {
        'today': today.isoformat(),
        'forty_days': in_forty_days.isoformat(),
    }
    email_content = render_to_string('accounting/digest_email.html',
                                     email_context)
    email_content_plaintext = render_to_string('accounting/digest_email.txt',
                                               email_context)

    format_dict = Format.FORMAT_DICT[Format.XLS_2007]
    file_attachment = {
        'title': 'Subscriptions_%(start)s_%(end)s.xls' % {
            'start': today.isoformat(),
            'end': in_forty_days.isoformat(),
        },
        'mimetype': format_dict['mimetype'],
        'file_obj': file_to_attach,
    }
    from_email = "Dimagi Accounting <%s>" % settings.DEFAULT_FROM_EMAIL
    env = ("[{}] ".format(settings.SERVER_ENVIRONMENT.upper())
           if settings.SERVER_ENVIRONMENT != "production" else "")
    email_subject = "{}Subscriptions ending in 40 Days from {}".format(
        env, today.isoformat())
    send_HTML_email(
        email_subject,
        settings.ACCOUNTS_EMAIL,
        email_content,
        email_from=from_email,
        text_content=email_content_plaintext,
        file_attachments=[file_attachment],
    )

    log_accounting_info("Sent summary of ending subscriptions from %(today)s" %
                        {
                            'today': today.isoformat(),
                        })
Example #36
 def email_response(self):
     with closing(StringIO()) as temp:
         export_from_tables(self.export_table, temp, Format.HTML)
         return self.render_json_response({
             'report': temp.getvalue(),
         })