Esempio n. 1
0
 def _get_workbook_from_data(self, headers, rows):
     # Export the given (sheet, headers)/(sheet, rows) data to an in-memory
     # XLSX file, then round-trip it through a temp file so it can be
     # re-parsed and returned as a workbook object.
     file = BytesIO()
     export_raw(headers, rows, file, format=Format.XLS_2007)
     with tempfile.TemporaryFile(suffix='.xlsx') as f:
         f.write(file.getvalue())
         f.seek(0)  # rewind so get_workbook reads from the start
         return get_workbook(f)
    def upload_raw_excel_translations(self,
                                      excel_headers,
                                      excel_data,
                                      expected_messages=None):
        """
        Prepares bulk app translation excel file and uploads it

        Structure of the xlsx file can be specified as following

        excel_headers:
         (("employee", ("id", "name", "gender")),
          ("building", ("id", "name", "address")))

        excel_data:
         (("employee", (("1", "cory", "m"),
                        ("2", "christian", "m"),
                        ("3", "amelia", "f"))),
          ("building", (("1", "dimagi", "585 mass ave."),
                        ("2", "old dimagi", "529 main st."))))
        """
        if not expected_messages:
            expected_messages = ["App Translations Updated!"]

        # NOTE(review): StringIO holding XLSX bytes implies Python 2; on
        # Python 3 this buffer would need to be io.BytesIO.
        file = StringIO()
        export_raw(excel_headers, excel_data, file, format=Format.XLS_2007)

        with tempfile.TemporaryFile(suffix='.xlsx') as f:
            f.write(file.getvalue())
            # NOTE(review): no f.seek(0) before uploading -- presumably
            # process_bulk_app_translation_upload rewinds the file; confirm.
            messages = process_bulk_app_translation_upload(self.app, f)

        # Compare only the second element of each returned message tuple.
        self.assertListEqual([m[1] for m in messages], expected_messages)
Esempio n. 3
0
    def get(self, request, domain, app_id):
        """Export a case summary workbook (case property, case type, and
        per-case-type sheets) as an XLS 2007 download.
        """
        case_metadata = self.app.get_case_metadata()
        language = request.GET.get('lang', 'en')

        headers = [(_('All Case Properties'), ('case_type', 'case_property',
                                               'description')),
                   (_('Case Types'), ('type', 'relationships', 'opened_by',
                                      'closed_by'))]
        headers += list(
            (case_type.name, tuple(CASE_SUMMARY_EXPORT_HEADER_NAMES))
            for case_type in case_metadata.case_types)

        # NOTE(review): the first two comprehensions emit one (sheet, rows)
        # pair per case type, repeating the sheet name -- verify export_raw
        # merges rows for duplicate sheet names.
        data = list(
            (_('All Case Properties'), self.get_case_property_rows(case_type))
            for case_type in case_metadata.case_types)
        data += list(
            (_('Case Types'), self.get_case_type_rows(case_type, language))
            for case_type in case_metadata.case_types)
        data += list(
            (case_type.name, self.get_case_questions_rows(case_type, language))
            for case_type in case_metadata.case_types)

        export_string = io.BytesIO()
        # Fixed: a stray trailing comma wrapped this statement in a useless
        # one-element tuple.
        export_raw(tuple(headers), data, export_string, Format.XLS_2007)
        return export_response(
            export_string,
            Format.XLS_2007,
            u'{app_name} v.{app_version} - Case Summary ({lang})'.format(
                app_name=self.app.name,
                app_version=self.app.version,
                lang=language),
        )
 def get_worksheet(self, title):
     # Serialize self.headers/self.data to an in-memory XLSX workbook and
     # return the parsed worksheet with the requested title.
     buf = StringIO()
     export_raw(self.headers, self.data, buf, format=Format.XLS_2007)
     buf.seek(0)
     # WorkbookJSONReader consumes the buffer during construction.
     return WorkbookJSONReader(buf).worksheets_by_title[title]
Esempio n. 5
0
def _export_pis(report, report_slug, non_data_cols=3):
    # Export a PI report, expanding each fractional "pct (num/denom)" cell
    # into three separate columns: numerator, denominator, percentage.
    context = report.get_data(include_urls=False)

    # THESE FUNCTIONS ARE TOTAL HACKS.
    # they rely on knowing the first two or three values are clinic, year, month,
    # and then the rest are fractional
    def _transform_headings(headings, non_data_cols):
        expanded = headings[:non_data_cols]
        for heading in headings[non_data_cols:]:
            expanded.extend(["%s num" % heading,
                             "%s denom" % heading,
                             "%s pct" % heading])
        return expanded

    def _transform_rows(rows, non_data_cols):
        return [_transform_values(row, non_data_cols) for row in rows]

    def _transform_values(values, non_data_cols):
        out = values[:non_data_cols]
        for cell in values[non_data_cols:]:
            if cell == "N/A":
                out.extend(["N/A"] * 3)
            else:
                # Turn "pct (num/denom)" into whitespace-separated tokens.
                for separator in "(/)":
                    cell = cell.replace(separator, " ")
                pct, num, denom = cell.split()
                out.extend([num, denom, pct])
        return out

    temp = StringIO()
    export_raw((("data", _transform_headings(context["headings"], non_data_cols)),),
               (("data", _transform_rows(context["rows"], non_data_cols)),), temp)
    return export_response(temp, "xlsx", report_slug)
Esempio n. 6
0
    def test_upload(self):
        # One "translations" sheet with (rule id, new name, new case type)
        # rows: two rows target fixture rules, the third uses an id (1000)
        # that should not match any rule.
        headers = (("translations", ("id", "name", "case_type")), )
        data = (("translations", (
            (self.translated_rule.id, 'test updated', 'song'),
            (self.email_rule.id, 'test email', 'song'),
            (1000, 'Not a rule', 'person'),
        )), )
        file = BytesIO()
        export_raw(headers, data, file, format=Format.XLS_2007)

        with tempfile.TemporaryFile(suffix='.xlsx') as f:
            f.write(file.getvalue())
            f.seek(0)  # rewind before parsing the workbook back
            workbook = get_workbook(f)
            # Keep only the second element (the text) of each message tuple.
            msgs = [
                m[1] for m in upload_conditional_alert_rows(
                    self.domain, workbook.get_worksheet())
            ]
            self.assertEqual(len(msgs), 3)
            # Messages reference rows 3 and 4 -- presumably 1-based sheet
            # rows counting the header, i.e. the 2nd and 3rd data rows.
            self._assertPatternIn(
                r"Row 3, with rule id \d+, does not use SMS content", msgs)
            self._assertPatternIn(
                r"Could not find rule for row 4, with id \d+", msgs)
            self.assertIn("Updated 1 rule(s)", msgs)
            # Only the translated rule is renamed; the untranslated rule is
            # expected to keep its original name and case type.
            self.assertEqual(self.translated_rule.name, 'test updated')
            self.assertEqual(self.translated_rule.case_type, 'song')
            self.assertEqual(self.untranslated_rule.name, 'test')
            self.assertEqual(self.untranslated_rule.case_type, 'person')
    def upload_raw_excel_translations(self, excel_headers, excel_data, expected_messages=None):
        """Build a bulk app translation workbook from raw sheet data, upload
        it, and assert the resulting messages.

        ``excel_headers`` and ``excel_data`` are sequences of
        (sheet_name, rows) pairs, e.g.::

            excel_headers:
             (("employee", ("id", "name", "gender")),
              ("building", ("id", "name", "address")))

            excel_data:
             (("employee", (("1", "cory", "m"),
                            ("2", "christian", "m"),
                            ("3", "amelia", "f"))),
              ("building", (("1", "dimagi", "585 mass ave."),
                            ("2", "old dimagi", "529 main st."))))
        """
        expected_messages = expected_messages or ["App Translations Updated!"]

        workbook_buffer = StringIO()
        export_raw(excel_headers, excel_data, workbook_buffer, format=Format.XLS_2007)

        with tempfile.TemporaryFile(suffix='.xlsx') as upload:
            upload.write(workbook_buffer.getvalue())
            messages = process_bulk_app_translation_upload(self.app, upload)

        self.assertListEqual([msg[1] for msg in messages], expected_messages)
Esempio n. 8
0
def prepare_fixture_download(table_ids, domain, task, download_id):
    """Prepare fixture data for Excel download

    Builds one "types" sheet plus one sheet per data type, exports them as
    an in-memory XLSX file, and exposes the bytes as a cached download.
    """
    data_types_book, excel_sheets = _prepare_fixture(table_ids,
                                                     domain,
                                                     task=task)

    header_groups = [("types", excel_sheets["types"]["headers"])]
    value_groups = [("types", excel_sheets["types"]["rows"])]
    for data_type in data_types_book:
        header_groups.append(
            (data_type.tag, excel_sheets[data_type.tag]["headers"]))
        value_groups.append(
            (data_type.tag, excel_sheets[data_type.tag]["rows"]))

    # Renamed locals: the originals shadowed the `file` and `format` builtins.
    # NOTE(review): StringIO holding binary XLSX data implies Python 2; on
    # Python 3 this buffer would need to be io.BytesIO.
    outfile = StringIO()
    excel_format = Format.XLS_2007
    export_raw(tuple(header_groups), tuple(value_groups), outfile, excel_format)
    return expose_cached_download(
        outfile.getvalue(),
        60 * 60 * 2,  # cache the download for two hours
        file_extension=".xlsx",
        mimetype=Format.from_format(excel_format).mimetype,
        content_disposition='attachment; filename="%s_lookup-tables.xlsx"' %
        domain,
        download_id=download_id,
    )
Esempio n. 9
0
 def get(self, request, domain, app_id):
     """Export a form summary workbook ("All Forms" overview sheet plus one
     sheet per form) as an XLS 2007 download.
     """
     language = request.GET.get('lang', 'en')
     modules = list(self.app.get_modules())
     case_meta = self.app.get_case_metadata()
     headers = [(_('All Forms'),
                 ('module_name', 'form_name', 'comment',
                  'module_display_condition', 'form_display_condition'))]
     headers += [(self._get_form_sheet_name(form, language),
                  tuple(FORM_SUMMARY_EXPORT_HEADER_NAMES))
                 for module in modules for form in module.get_forms()]
     data = list(
         (_('All Forms'), self.get_all_forms_row(module, form, language))
         for module in modules for form in module.get_forms())
     data += list((self._get_form_sheet_name(form, language),
                   self._get_form_row(form, language, case_meta))
                  for module in modules for form in module.get_forms())
     export_string = io.BytesIO()
     # Fixed: a stray trailing comma wrapped this statement in a useless
     # one-element tuple.
     export_raw(tuple(headers), data, export_string, Format.XLS_2007)
     return export_response(
         export_string,
         Format.XLS_2007,
         '{app_name} v.{app_version} - Form Summary ({lang})'.format(
             app_name=self.app.name,
             app_version=self.app.version,
             lang=language),
     )
Esempio n. 10
0
def export_all_form_metadata(req, domain):
    """
    Export metadata for _all_ forms in a domain.
    """
    # NOTE(review): `format` shadows the builtin of the same name.
    format = req.GET.get("format", Format.XLS_2007)

    headers = ("domain", "instanceID", "received_on", "type", "timeStart",
               "timeEnd", "deviceID", "username", "userID", "xmlns", "version")

    def _form_data_to_row(formdata):
        # Map each header key to the matching attribute on the record;
        # "type" is special-cased to a human-readable name for the xmlns.
        def _key_to_val(formdata, key):
            if key == "type":
                return xmlns_to_name(domain, formdata.xmlns, app_id=None)
            else:
                return getattr(formdata, key)

        return [_key_to_val(formdata, key) for key in headers]

    fd, path = tempfile.mkstemp()

    # Stream rows lazily from the queryset, ordered by submission time.
    data = (_form_data_to_row(f) for f in stream_qs(
        HQFormData.objects.filter(domain=domain).order_by('received_on')))

    # NOTE(review): text-mode 'w' for XLSX output implies Python 2; the
    # handle from open(path) below is returned without an explicit close --
    # presumably export_response takes ownership. Confirm both.
    with os.fdopen(fd, 'w') as temp:
        export_raw((("forms", headers), ), (("forms", data), ), temp)

    return export_response(open(path), format, "%s_forms" % domain)
Esempio n. 11
0
def save_metadata_export_to_tempfile(domain):
    """
    Saves the domain's form metadata to a file. Returns the filename.
    """
    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _row_value(doc, key):
        # Columns come from different parts of the ES document: the top
        # level, the "form" sub-document, or its "meta" block.
        if key == "type":
            return xmlns_to_name(domain, doc.get("xmlns"), app_id=None)
        if key == "version":
            return doc["form"].get("@version")
        if key in ["domain", "received_on", "xmlns"]:
            return doc.get(key)
        return doc["form"].get("meta", {}).get(key)

    def _form_data_to_row(formdata):
        return [_row_value(formdata, key) for key in headers]

    fd, path = tempfile.mkstemp()

    q = {
        "query": {"match_all": {}},
        "sort": [{"received_on" : {"order": "desc"}}],
    }

    results = stream_es_query(params={"domain.exact": domain}, q=q, es_url=XFORM_INDEX + '/xform/_search', size=999999)
    data = (_form_data_to_row(hit["_source"]) for hit in results)

    with os.fdopen(fd, 'w') as temp:
        export_raw((("forms", headers),), (("forms", data),), temp)

    return path
Esempio n. 12
0
    def get(self, request, domain, app_id):
        """Export a case summary workbook (overview sheet plus one sheet per
        case type) as an XLS 2007 download.
        """
        case_metadata = self.app.get_case_metadata()
        language = request.GET.get("lang", "en")

        headers = [("All Case Properties", ("case_type", "case_property"))]
        headers += list(
            (case_type.name, tuple(CASE_SUMMARY_EXPORT_HEADER_NAMES)) for case_type in case_metadata.case_types
        )

        # NOTE(review): one ("All Case Properties", rows) pair is emitted per
        # case type, repeating the sheet name -- verify export_raw merges them.
        data = list(
            ("All Case Properties", self.get_case_property_rows(case_type)) for case_type in case_metadata.case_types
        )
        data += list(
            (case_type.name, self.get_case_questions_rows(case_type, language))
            for case_type in case_metadata.case_types
        )

        export_string = StringIO()
        # Fixed: removed the stray trailing comma that wrapped this call in a
        # useless one-element tuple.
        export_raw(tuple(headers), data, export_string, Format.XLS_2007)
        return export_response(
            export_string,
            Format.XLS_2007,
            u"{app_name} v.{app_version} - Case Summary ({lang})".format(
                app_name=self.app.name, app_version=self.app.version, lang=language
            ),
        )
Esempio n. 13
0
 def get(self, request, domain, app_id):
     """Export a form summary workbook ("All Forms" overview plus one sheet
     per form) as an XLS 2007 download.
     """
     language = request.GET.get('lang', 'en')
     modules = list(self.app.get_modules())
     case_meta = self.app.get_case_metadata()
     headers = [(_('All Forms'),
                 ('module_name', 'form_name', 'comment', 'module_display_condition', 'form_display_condition'))]
     headers += [
         (self._get_form_sheet_name(module, form, language), tuple(FORM_SUMMARY_EXPORT_HEADER_NAMES))
         for module in modules for form in module.get_forms()
     ]
     data = list((
         _('All Forms'),
         self.get_all_forms_row(module, form, language)
     ) for module in modules for form in module.get_forms())
     data += list(
         (self._get_form_sheet_name(module, form, language), self._get_form_row(form, language, case_meta))
         for module in modules for form in module.get_forms()
     )
     export_string = io.BytesIO()
     # Fixed: removed a stray trailing comma that turned this statement into
     # a useless one-element tuple.
     export_raw(tuple(headers), data, export_string, Format.XLS_2007)
     return export_response(
         export_string,
         Format.XLS_2007,
         '{app_name} v.{app_version} - Form Summary ({lang})'.format(
             app_name=self.app.name,
             app_version=self.app.version,
             lang=language
         ),
     )
Esempio n. 14
0
 def _get_workbook_from_data(self, headers, rows):
     # Round-trip the sheet data through an XLSX temp file and parse it back.
     exported = BytesIO()
     export_raw(headers, rows, exported, format=Format.XLS_2007)
     with tempfile.TemporaryFile(suffix='.xlsx') as tmp:
         tmp.write(exported.getvalue())
         tmp.seek(0)
         return get_workbook(tmp)
Esempio n. 15
0
    def get(self, request, domain, app_id):
        """Export a case summary workbook (case property, case type, and
        per-case-type question sheets) as an XLS 2007 download.
        """
        case_metadata = self.app.get_case_metadata()
        language = request.GET.get('lang', 'en')

        headers = [(_('All Case Properties'), ('case_type', 'case_property', 'description')),
                   (_('Case Types'), ('type', 'relationships', 'opened_by', 'closed_by'))]
        headers += list((
            case_type.name,
            tuple(CASE_SUMMARY_EXPORT_HEADER_NAMES)
        ) for case_type in case_metadata.case_types)

        data = [(
            _('All Case Properties'),
            self.get_case_property_rows(case_type)
        ) for case_type in case_metadata.case_types]
        # NOTE(review): unlike its siblings, this entry is not written as a
        # (sheet_name, rows) pair -- presumably get_case_type_rows returns
        # one itself; confirm against that helper.
        data += [self.get_case_type_rows(case_metadata.case_types, language)]
        data += [(
            case_type.name,
            self.get_case_questions_rows(case_type, language)
        ) for case_type in case_metadata.case_types]

        export_string = io.BytesIO()
        # Fixed: removed a stray trailing comma that wrapped this call in a
        # one-element tuple.
        export_raw(tuple(headers), data, export_string, Format.XLS_2007)
        return export_response(
            export_string,
            Format.XLS_2007,
            '{app_name} v.{app_version} - Case Summary ({lang})'.format(
                app_name=self.app.name,
                app_version=self.app.version,
                lang=language
            ),
        )
Esempio n. 16
0
def download_bulk_app_translations(request, domain, app_id):
    """Stream an XLSX workbook of the app's bulk translations.

    A ``lang`` query parameter switches to the single-sheet layout for that
    language; ``skipbl=true`` restricts output to Transifex-eligible rows.
    """
    lang = request.GET.get('lang')
    skip_blacklisted = request.GET.get('skipbl', 'false') == 'true'
    app = get_app(domain, app_id)
    # if there is a lang selected, assume that user wants a single sheet
    is_single_sheet = bool(lang)
    headers = get_bulk_app_sheet_headers(
        app,
        single_sheet=is_single_sheet,
        lang=lang,
        eligible_for_transifex_only=skip_blacklisted)
    if is_single_sheet:
        sheets = get_bulk_app_single_sheet_by_name(app, lang, skip_blacklisted)
    else:
        sheets = get_bulk_app_sheets_by_name(
            app, eligible_for_transifex_only=skip_blacklisted)

    outfile = io.BytesIO()
    export_raw(headers, list(sheets.items()), outfile)
    filename = '{app_name} v.{app_version} - App Translations{lang}{transifex_only}'.format(
        app_name=app.name,
        app_version=app.version,
        lang=' ' + lang if is_single_sheet else '',
        transifex_only=' (Transifex only)' if skip_blacklisted else '',
    )
    return export_response(outfile, Format.XLS_2007, filename)
Esempio n. 17
0
 def get(self, request, domain, app_id):
     """Export a form summary workbook ("All Forms" sheet plus one sheet per
     form) as an XLS 2007 download.
     """
     language = request.GET.get("lang", "en")
     modules = list(self.app.get_modules())
     headers = [("All Forms", ("module_name", "form_name", "comment"))]
     headers += [
         (self._get_form_sheet_name(module, form, language), tuple(FORM_SUMMARY_EXPORT_HEADER_NAMES))
         for module in modules
         for form in module.get_forms()
     ]
     data = list(
         ("All Forms", self.get_all_forms_row(module, form, language))
         for module in modules
         for form in module.get_forms()
     )
     data += list(
         (self._get_form_sheet_name(module, form, language), self._get_form_row(form, language))
         for module in modules
         for form in module.get_forms()
     )
     export_string = StringIO()
     # Fixed: removed the stray trailing comma that built a useless
     # one-element tuple around this call.
     export_raw(tuple(headers), data, export_string, Format.XLS_2007)
     return export_response(
         export_string,
         Format.XLS_2007,
         u"{app_name} v.{app_version} - Form Summary ({lang})".format(
             app_name=self.app.name, app_version=self.app.version, lang=language
         ),
     )
Esempio n. 18
0
def export_all_form_metadata(req, domain):
    """
    Export metadata for _all_ forms in a domain.
    """
    fmt = req.GET.get("format", Format.XLS_2007)

    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _row_for(formdata):
        # "type" is derived from the xmlns; every other column is a plain
        # attribute of the form-data record.
        row = []
        for key in headers:
            if key == "type":
                row.append(xmlns_to_name(domain, formdata.xmlns, app_id=None))
            else:
                row.append(getattr(formdata, key))
        return row

    fd, path = tempfile.mkstemp()

    data = (_row_for(form) for form in stream_qs(
        HQFormData.objects.filter(domain=domain).order_by('received_on')
    ))

    with os.fdopen(fd, 'w') as temp:
        export_raw((("forms", headers),), (("forms", data),), temp)

    return export_response(open(path), fmt, "%s_forms" % domain)
Esempio n. 19
0
def download_bulk_app_translations(request, domain, app_id):
    # Export every expected translation sheet for the app as one workbook.
    app = get_app(domain, app_id)
    headers = expected_bulk_app_sheet_headers(app)
    rows = expected_bulk_app_sheet_rows(app)
    buf = StringIO()
    export_raw(headers, list(rows.iteritems()), buf)
    return export_response(buf, Format.XLS_2007, "bulk_app_translations")
Esempio n. 20
0
    def export_to_file(self, output_file):
        # Build parallel (sheet_name, headers) and (sheet_name, rows) lists
        # in a single pass over the tables, then write them to output_file
        # as one XLS 2007 workbook.
        header_sheets = []
        data_sheets = []
        for table in self.tables:
            header_sheets.append((table.name, TITLE_ROW))
            data_sheets.append((table.name, list(table.get_sheet_data())))

        export_raw(header_sheets, data_sheets, output_file, format=Format.XLS_2007)
Esempio n. 21
0
def download_bulk_app_translations(request, domain, app_id):
    # Export the app's expected translation sheets as an XLSX response.
    app = get_app(domain, app_id)
    workbook_headers = expected_bulk_app_sheet_headers(app)
    sheet_rows = expected_bulk_app_sheet_rows(app)
    outfile = io.BytesIO()
    export_raw(workbook_headers, list(six.iteritems(sheet_rows)), outfile)
    return export_response(outfile, Format.XLS_2007, "bulk_app_translations")
Esempio n. 22
0
def download_item_lists(request, domain):
    """Export all fixture (lookup table) data for a domain as an Excel file.

    Builds a "types" sheet listing each data type and its fields, plus one
    sheet per data type containing its item rows, padded with per-item
    group and user assignment columns.
    """
    data_types = FixtureDataType.by_domain(domain)
    data_type_schemas = []
    max_fields = 0
    # Per-type running maxima of group/user assignment counts.
    max_groups = 0
    max_users = 0
    # Global maxima across all data types (used to size the headers).
    mmax_groups = 0
    mmax_users = 0
    data_tables = []
    

    for data_type in data_types:
        type_schema = [data_type.name, data_type.tag]
        fields = [field for field in data_type.fields]
        type_id = data_type.get_id
        data_table_of_type = []
        # First pass: find the widest group/user membership for this type.
        for item_row in FixtureDataItem.by_data_type(domain, type_id):
            group_len = len(item_row.get_groups())
            max_groups = group_len if group_len>max_groups else max_groups
            user_len = len(item_row.get_users())
            max_users = user_len if user_len>max_users else max_users
        # Second pass: emit one row per item, right-padding the group and
        # user columns so all rows of the sheet have equal width.
        for item_row in FixtureDataItem.by_data_type(domain, type_id):
            groups = [group.name for group in item_row.get_groups()] + ["" for x in range(0,max_groups-len(item_row.get_groups()))]
            users = [user.raw_username for user in item_row.get_users()] + ["" for x in range(0, max_users-len(item_row.get_users()))]
            data_row = tuple([str(_id_from_doc(item_row)),"N"]+
                             [item_row.fields[field] for field in fields]+
                             groups + users)
            data_table_of_type.append(data_row)
        type_schema.extend(fields)
        data_type_schemas.append(tuple(type_schema))
        if max_fields<len(type_schema):
            max_fields = len(type_schema)
        data_tables.append((data_type.tag,tuple(data_table_of_type)))
        mmax_users = max_users if max_users>mmax_users else mmax_users
        mmax_groups = max_groups if max_groups>mmax_groups else mmax_groups
        max_users = 0
        max_groups = 0

    # type_schema is [name, tag, field1, ...], so the field count is
    # max_fields - 2; range(1, max_fields - 1) yields exactly that many.
    type_headers = ["name", "tag"] + ["field %d" % x for x in range(1, max_fields - 1)]
    type_headers = ("types", tuple(type_headers))
    table_headers = [type_headers]    
    for type_schema in data_type_schemas:
        # type_schema[1] is the tag (the sheet name); [2:] are the fields.
        item_header = (type_schema[1], tuple(["UID", DELETE_HEADER] +
                                             ["field: " + x for x in type_schema[2:]] +
                                             ["group %d" % x for x in range(1, mmax_groups + 1)] +
                                             ["user %d" % x for x in range(1, mmax_users + 1)]))
        table_headers.append(item_header)

    table_headers = tuple(table_headers)
    type_rows = ("types", tuple(data_type_schemas))
    data_tables = tuple([type_rows]+data_tables)
    
    # NOTE(review): text-mode 'w' plus the unclosed open(path) below implies
    # Python 2 and framework-owned cleanup -- confirm.
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as temp:
        export_raw((table_headers), (data_tables), temp)
    format = Format.XLS_2007
    return export_response(open(path), format, "%s_fixtures" % domain)
Esempio n. 23
0
def download_bulk_app_translations(request, domain, app_id):
    # Serialize all bulk translation sheets for the app into one workbook.
    app = get_app(domain, app_id)
    sheet_headers = get_bulk_app_sheet_headers(app)
    sheet_rows = get_bulk_app_sheet_rows(app)
    outbuf = io.BytesIO()
    export_raw(sheet_headers, list(six.iteritems(sheet_rows)), outbuf)
    return export_response(
        outbuf,
        Format.XLS_2007,
        '{app_name} v.{app_version} - App Translations'.format(
            app_name=app.name, app_version=app.version))
Esempio n. 24
0
def download_bulk_app_translations(request, domain, app_id):
    # Export every expected bulk-translation sheet for the app as a
    # "{name} v.{version} - App Translations" XLSX download.
    app = get_app(domain, app_id)
    headers = expected_bulk_app_sheet_headers(app)
    rows = expected_bulk_app_sheet_rows(app)
    temp = io.BytesIO()
    # rows is a mapping; export_raw is given its (key, value) pairs --
    # presumably sheet name to row data.
    data = [(k, v) for k, v in six.iteritems(rows)]
    export_raw(headers, data, temp)
    filename = '{app_name} v.{app_version} - App Translations'.format(
        app_name=app.name,
        app_version=app.version)
    return export_response(temp, Format.XLS_2007, filename)
    def setUpClass(cls):
        # NOTE(review): takes `cls` but no @classmethod decorator is visible
        # in this snippet -- presumably applied on the (cropped) line above.
        cls.app = Application.wrap(cls.get_json("app"))
        # Todo, refactor this into BulkAppTranslationTestBase.upload_raw_excel_translations
        file = StringIO()
        export_raw(cls.excel_headers, cls.excel_data, file, format=Format.XLS_2007)

        with tempfile.TemporaryFile(suffix='.xlsx') as f:
            f.write(file.getvalue())
            # NOTE(review): no f.seek(0) before reading -- presumably
            # WorkbookJSONReader rewinds the file itself; confirm.
            wb_reader = WorkbookJSONReader(f)
            cls.expected_workbook = [{'name': ws.title, 'rows': list(ws)}
                                     for ws in wb_reader.worksheets]
    def setUpClass(cls):
        cls.app = Application.wrap(cls.get_json("app"))
        # Todo, refactor this into BulkAppTranslationTestBase.upload_raw_excel_translations
        exported = StringIO()
        export_raw(cls.excel_headers, cls.excel_data, exported, format=Format.XLS_2007)

        with tempfile.TemporaryFile(suffix='.xlsx') as tmp:
            tmp.write(exported.getvalue())
            reader = WorkbookJSONReader(tmp)
            cls.expected_workbook = [
                {'name': sheet.title, 'rows': list(sheet)}
                for sheet in reader.worksheets
            ]
Esempio n. 27
0
def download_multimedia_paths(request, domain, app_id):
    from corehq.apps.hqmedia.view_helpers import download_multimedia_paths_rows
    # Single-sheet export of the app's multimedia paths.
    app = get_app(domain, app_id)

    sheet_headers = ((_("Paths"), (_("Old Path"), _("New Path"), _("Usages"))), )
    path_rows = download_multimedia_paths_rows(app)

    outfile = io.BytesIO()
    export_raw(sheet_headers, path_rows, outfile)
    return export_response(
        outfile, Format.XLS_2007,
        '{app_name} v.{app_version} - App Multimedia Paths'.format(
            app_name=app.name, app_version=app.version))
Esempio n. 28
0
def download_bulk_multimedia_translations(request, domain, app_id):
    # Export multimedia translations for one language as a single-sheet
    # workbook; headers[0][0] supplies that sheet's name.
    lang = request.GET.get('lang')
    app = get_app(domain, app_id)

    headers = get_bulk_multimedia_sheet_headers(lang)
    rows = get_bulk_multimedia_sheet_rows(lang, app)

    temp = io.BytesIO()
    export_raw(headers, [(headers[0][0], rows)], temp)
    filename = '{app_name} v.{app_version} - Multimedia Translations {lang}'.format(
        app_name=app.name, app_version=app.version, lang=lang)
    return export_response(temp, Format.XLS_2007, filename)
    def _build_translation_download_file(self, headers, data=None):
        # Build an in-memory translations workbook; callers may pass explicit
        # rows, otherwise the app's latest default translations are used.
        if data is None:
            defaults = get_default_translations_for_download(self.app, 'latest')
            data = [(key, value) for key, value in six.iteritems(defaults)]

        temp = BytesIO()
        export_raw(headers, (('translations', tuple(data)),), temp)
        temp.seek(0)  # callers .read() from the start of the buffer
        return temp
Esempio n. 30
0
    def _build_translation_download_file(self, headers, data=None):
        # Build an in-memory translations workbook; callers may pass explicit
        # (key, value) rows, otherwise the app's defaults are used.
        if data is None:
            data = []
            translations = get_default_translations_for_download(self.app)
            # Python 2 iteritems: collect (key, translation) pairs.
            for translation_key, translation_value in translations.iteritems():
                data.append((translation_key, translation_value))

        data = (('translations', tuple(data)),)
        temp = StringIO()
        export_raw(headers, data, temp)
        temp.seek(0)            # .read() is used somewhere so this needs to be at the beginning
        return temp
Esempio n. 31
0
    def get(self, request, domain):
        # Export translated and untranslated conditional alert rows as two
        # sheets of a single workbook.
        headers = get_conditional_alert_headers(self.domain)
        translated_rows, untranslated_rows = get_conditional_alert_rows(self.domain)

        outfile = io.BytesIO()
        sheets = [
            (TranslatedConditionalAlertUploader.sheet_name, translated_rows),
            (UntranslatedConditionalAlertUploader.sheet_name, untranslated_rows),
        ]
        export_raw(headers, sheets, outfile)
        return export_response(
            outfile,
            Format.XLS_2007,
            'Conditional Alerts - {domain}'.format(domain=domain))
Esempio n. 32
0
def download_multimedia_paths(request, domain, app_id):
    """Download a workbook of the app's multimedia paths.

    Accepts an ``only_missing`` query parameter that is forwarded to the
    row builder.
    """
    from corehq.apps.hqmedia.view_helpers import download_multimedia_paths_rows
    app = get_app(domain, app_id)

    headers = ((_("Paths"), (_("Old Path"), _("New Path"), _("Usages"))),)
    # Fixed: the raw query-string value was passed through, so
    # ?only_missing=false was truthy (a non-empty string). Parse the flag
    # the way sibling views here parse boolean query parameters.
    only_missing = request.GET.get('only_missing', 'false') == 'true'
    rows = download_multimedia_paths_rows(app, only_missing=only_missing)

    temp = io.BytesIO()
    export_raw(headers, rows, temp)
    filename = '{app_name} v.{app_version} - App Multimedia Paths'.format(
        app_name=app.name,
        app_version=app.version)
    return export_response(temp, Format.XLS_2007, filename)
Esempio n. 33
0
def build_ui_translation_download_file(app):
    """Build an in-memory Excel file of the app's UI translations.

    Produces one "translations" sheet with a property column, one column
    per app language, and a trailing "platform" column.
    """
    properties = tuple(["property"] + app.langs + ["platform"])
    temp = StringIO()
    headers = (("translations", properties), )

    # property name -> partially-filled row [prop, lang1, lang2, ...]
    row_dict = {}
    for i, lang in enumerate(app.langs):
        index = i + 1
        trans_dict = app.translations.get(lang, {})
        # Python 2 iteritems; pad each row out to the current language
        # column before appending this language's translation.
        for prop, trans in trans_dict.iteritems():
            if prop not in row_dict:
                row_dict[prop] = [prop]
            num_to_fill = index - len(row_dict[prop])
            row_dict[prop].extend(
                ["" for i in range(num_to_fill)] if num_to_fill > 0 else [])
            row_dict[prop].append(trans)

    # NOTE(review): relies on Python 2 dict.values() returning a list
    # (rows.extend below); on Python 3 this would need list(...).
    rows = row_dict.values()
    all_prop_trans = get_default_translations_for_download(app)
    # Add bare rows for default-translatable properties with no custom entry.
    rows.extend([[t] for t in sorted(all_prop_trans.keys())
                 if t not in row_dict])

    def fillrow(row):
        # Right-pad the row with blanks to the full column count.
        num_to_fill = len(properties) - len(row)
        row.extend(["" for i in range(num_to_fill)] if num_to_fill > 0 else [])
        return row

    def add_default(row):
        # Fill the defaults column when no custom translation exists.
        row_index = get_index_for_defaults(app.langs) + 1
        if not row[row_index]:
            # If no custom translation exists, replace it.
            row[row_index] = all_prop_trans.get(row[0], "")
        return row

    def add_sources(row):
        # Tag the last ("platform") column based on the property's source.
        platform_map = {
            "CommCareAndroid": "Android",
            "CommCareJava": "Java",
            "ODK": "Android",
            "JavaRosa": "Java",
        }
        source = system_text_sources.SOURCES.get(row[0], "")
        row[-1] = platform_map.get(source, "")
        return row

    rows = [add_sources(add_default(fillrow(row))) for row in rows]

    data = (("translations", tuple(rows)), )
    export_raw(headers, data, temp)
    return temp
Esempio n. 34
0
def download_bulk_app_translations(request, domain, app_id):
    # Export bulk translation sheets; a ?lang= parameter selects the
    # single-sheet layout for that language.
    lang = request.GET.get('lang')
    app = get_app(domain, app_id)
    headers = get_bulk_app_sheet_headers(app, lang=lang)
    if lang:
        sheets = get_bulk_app_single_sheet_by_name(app, lang)
    else:
        sheets = get_bulk_app_sheets_by_name(app)

    outfile = io.BytesIO()
    export_raw(headers, list(six.iteritems(sheets)), outfile)
    filename = '{app_name} v.{app_version} - App Translations{lang}'.format(
        app_name=app.name,
        app_version=app.version,
        lang=' ' + lang if lang else '')
    return export_response(outfile, Format.XLS_2007, filename)
Esempio n. 35
0
def build_ui_translation_download_file(app):
    """Build an in-memory spreadsheet of the app's UI translations.

    Produces a single "translations" sheet whose columns are:
    property, one column per app language, and a trailing "platform" column.
    Returns the file-like object; the caller builds the HTTP response.
    """
    properties = tuple(["property"] + app.langs + ["platform"])
    temp = StringIO()
    headers = (("translations", properties),)

    # Collect custom translations: one row per property, with values aligned
    # to the language columns (missing earlier languages padded with "").
    row_dict = {}
    for i, lang in enumerate(app.langs):
        index = i + 1
        trans_dict = app.translations.get(lang, {})
        for prop, trans in trans_dict.items():  # .items() works on Py2 and Py3
            if prop not in row_dict:
                row_dict[prop] = [prop]
            num_to_fill = index - len(row_dict[prop])
            row_dict[prop].extend(["" for i in range(num_to_fill)] if num_to_fill > 0 else [])
            row_dict[prop].append(trans)

    # Materialize as a list: on Python 3 dict.values() is a view with no
    # .extend(), so the previous `rows = row_dict.values()` would crash below.
    rows = list(row_dict.values())
    all_prop_trans = get_default_translations_for_download(app)
    rows.extend([[t] for t in sorted(all_prop_trans.keys()) if t not in row_dict])

    def fillrow(row):
        # Pad the row out to the full column count.
        num_to_fill = len(properties) - len(row)
        row.extend(["" for i in range(num_to_fill)] if num_to_fill > 0 else [])
        return row

    def add_default(row):
        # Fill the default-language column from the bundled defaults when no
        # custom translation exists.
        row_index = get_index_for_defaults(app.langs) + 1
        if not row[row_index]:
            # If no custom translation exists, replace it.
            row[row_index] = all_prop_trans.get(row[0], "")
        return row

    def add_sources(row):
        # Map the system text source to a platform label in the last column.
        platform_map = {
            "CommCareAndroid": "Android",
            "CommCareJava": "Java",
            "ODK": "Android",
            "JavaRosa": "Java",
        }
        source = system_text_sources.SOURCES.get(row[0], "")
        row[-1] = platform_map.get(source, "")
        return row

    rows = [add_sources(add_default(fillrow(row))) for row in rows]

    data = (("translations", tuple(rows)),)
    export_raw(headers, data, temp)
    return temp
Esempio n. 36
0
 def test_validate_new_site_codes_type(self, location_type_mock, locations_mock, *_):
     """New site codes must be used at their expected location-type level."""
     location_type_mock.by_domain.return_value = self.location_types
     location_type_mock.select_related.return_value.filter.return_value = self.location_types
     locations_mock.select_related.return_value.filter.return_value = [
         Location(site_code='13', location_type=self.state_location_type)
     ]
     with tempfile.TemporaryFile() as upload:
         # Round-trip the fixture rows through a real xlsx workbook.
         export_raw(self.headers, self.rows, upload, format=Format.XLS_2007)
         upload.seek(0)
         errors = Parser(self.domain, get_workbook(upload)).parse()
     self.assertIn('state 13 used as supervisor', errors)
     self.assertIn('state 13 used as awc', errors)
     self.assertNotIn('state 13 used as state', errors)
Esempio n. 37
0
    def get(self, request, domain):
        """Download the domain's conditional alerts (id, name, case type) as Excel."""
        title = _("Conditional Alerts")
        headers = ((title, (_('id'), _('name'), _('case_type'))),)
        rows = [
            (rule.pk, rule.name, rule.case_type)
            for rule in self.get_conditional_alerts_queryset()
        ]

        filename = '{title} - {domain}'.format(
            domain=domain,
            title=title)
        outfile = io.BytesIO()
        export_raw(headers, [(title, rows)], outfile)
        return export_response(outfile, Format.XLS_2007, filename)
Esempio n. 38
0
 def test_validate_parents(self, location_type_mock, locations_mock, *_):
     """Parent assignments must match the expected hierarchy and not be archived."""
     location_type_mock.by_domain.return_value = self.location_types
     location_type_mock.select_related.return_value.filter.return_value = self.location_types
     locations_mock.select_related.return_value.filter.return_value = [
         Location(site_code='13', location_type=self.state_location_type)
     ]
     with tempfile.TemporaryFile() as upload:
         # Round-trip the fixture rows through a real xlsx workbook.
         export_raw(self.headers, self.rows, upload, format=Format.XLS_2007)
         upload.seek(0)
         errors = Parser(self.domain, get_workbook(upload)).parse()
     self.assertIn('Unexpected non-state parent 1 set for supervisor', errors, "missing location found")
     self.assertIn('Unexpected state parent 13 set for awc', errors, "incorrect parent type not flagged")
     self.assertIn('Parent 12 is marked for archival', errors, "archived parent not caught")
Esempio n. 39
0
 def dump(self, transitions):
     """
     :return: excel workbook with one tab with title as old location's site code,
     which holds details all household cases assigned to it
     Returns None when no valid operation produced any rows.
     """
     rows = {}
     for operation, details in transitions.items():
         if operation not in self.valid_operations:
             continue
         rows.update(self._get_rows_for_location(operation, details))
     if not rows:
         return None
     output = io.BytesIO()
     # One sheet per site code, all sharing the same column headers.
     sheet_headers = [[site_code, self.headers] for site_code in rows]
     export_raw(sheet_headers, rows.items(), output)
     output.seek(0)
     return output
Esempio n. 40
0
def save_metadata_export_to_tempfile(domain, datespan=None, user_ids=None):
    """
    Saves the domain's form metadata to a file. Returns the filename.

    :param domain: domain whose form submissions are exported
    :param datespan: optional span restricting results by form.meta.timeEnd
    :param user_ids: optional list of user IDs to filter submissions by
    """
    # Column order doubles as the per-row field order in _form_data_to_row.
    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _form_data_to_row(formdata):
        # Resolve each header key against the raw ES form document.
        def _key_to_val(formdata, key):
            if key == "type":
                # "type" is derived from the form's xmlns.
                return xmlns_to_name(domain, formdata.get("xmlns"), app_id=None)
            if key == "version":
                return formdata["form"].get("@version")
            if key in ["domain", "received_on", "xmlns"]:
                # These live at the top level of the document.
                return formdata.get(key)
            # Everything else comes from the form's meta block.
            return formdata["form"].get("meta", {}).get(key)
        return [_key_to_val(formdata, key) for key in headers]

    fd, path = tempfile.mkstemp()

    # Base elasticsearch query: match everything, newest first.
    q = {
        "query": {"match_all": {}},
        "sort": [{"received_on" : {"order": "desc"}}],
        "filter": {"and": []},
    }

    if datespan:
        # Restrict by the form's end time; upper bound is exclusive.
        q["query"] = {
            "range": {
                "form.meta.timeEnd": {
                    "from": datespan.startdate_param,
                    "to": datespan.enddate_param,
                    "include_upper": False,
                }
            }
        }

    if user_ids is not None:
        q["filter"]["and"].append({"terms": {"form.meta.userID": user_ids}})

    # NOTE(review): size=999999 is used as "effectively all results" — confirm
    # stream_es_query scrolls rather than truncating at that count.
    results = stream_es_query(params={"domain.exact": domain}, q=q, es_url=XFORM_INDEX + '/xform/_search', size=999999)
    data = (_form_data_to_row(res["_source"]) for res in results)

    with os.fdopen(fd, 'w') as temp:
        export_raw((("forms", headers),), (("forms", data),), temp)

    return path
Esempio n. 41
0
def save_metadata_export_to_tempfile(domain, format, datespan=None, user_ids=None):
    """
    Saves the domain's form metadata to a file. Returns the filename.

    :param domain: domain whose form submissions are exported
    :param format: export format passed through to export_raw
    :param datespan: optional span restricting results by form.meta.timeEnd
    :param user_ids: optional list of user IDs to filter submissions by
    """
    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _lookup(formdata, key):
        # Resolve one header key against the raw ES form document.
        if key == "type":
            return xmlns_to_name(domain, formdata.get("xmlns"), app_id=None)
        if key == "version":
            return formdata["form"].get("@version")
        if key in ["domain", "received_on", "xmlns"]:
            return formdata.get(key)
        return formdata["form"].get("meta", {}).get(key)

    def _form_data_to_row(formdata):
        return [_lookup(formdata, key) for key in headers]

    fd, path = tempfile.mkstemp()

    # Base query: everything in the domain, newest first.
    query = {
        "query": {"match_all": {}},
        "sort": [{"received_on" : {"order": "desc"}}],
        "filter": {"and": []},
    }
    if datespan:
        query["query"] = {
            "range": {
                "form.meta.timeEnd": {
                    "from": datespan.startdate_param,
                    "to": datespan.enddate_param,
                    "include_upper": False,
                }
            }
        }
    if user_ids is not None:
        query["filter"]["and"].append({"terms": {"form.meta.userID": user_ids}})

    results = stream_es_query(params={"domain.exact": domain}, q=query, es_url=XFORM_INDEX + '/xform/_search', size=999999)
    row_gen = (_form_data_to_row(hit["_source"]) for hit in results)

    with os.fdopen(fd, 'w') as temp:
        export_raw((("forms", headers),), (("forms", row_gen),), temp, format=format)

    return path
Esempio n. 42
0
    def _upload(self, data, headers=None):
        """Build an xlsx workbook from (sheet, rows) data and run the upload.

        Returns only the message text from each (type, message) tuple.
        """
        if headers is None:
            headers = (
                ("translated", ("id", "name", "message_en", "message_es")),
                ("not translated", ("id", "name", "message")),
            )

        buffer = BytesIO()
        export_raw(headers, data, buffer, format=Format.XLS_2007)

        with tempfile.TemporaryFile(suffix='.xlsx') as f:
            f.write(buffer.getvalue())
            f.seek(0)
            results = upload_conditional_alert_workbook(self.domain, get_workbook(f))
            return [m[1] for m in results]
Esempio n. 43
0
 def dump(self, transitions_per_location_type):
     """
     :param transitions_per_location_type: location types mapped to transitions where
     each transition is a dict with an operation
     like merge or split mapped to details for the operation
     Check Parser for the expected format for each operation
     """
     # One sheet per location type, all with the standard dumper columns.
     headers = [
         [location_type, DUMPER_COLUMNS]
         for location_type in transitions_per_location_type
     ]
     self._setup_site_codes(list(transitions_per_location_type.values()))
     stream = io.BytesIO()
     export_raw(headers, self._rows(transitions_per_location_type).items(), stream)
     stream.seek(0)
     return stream
Esempio n. 44
0
def domain_list_download(request):
    """Export basic properties of every domain as a single-sheet Excel file."""
    properties = ("name", "city", "country", "region", "project_type", "customer_type", "is_test?")

    def _prop(domain, prop):
        # A trailing "?" marks a boolean property rendered via yesno().
        if prop.endswith("?"):
            return yesno(getattr(domain, prop[:-1], ""))
        return getattr(domain, prop, "")

    def _row(domain):
        return (_prop(domain, prop) for prop in properties)

    domains = Domain.get_all()
    temp = StringIO()
    export_raw((("domains", properties),),
               (("domains", (_row(domain) for domain in domains)),),
               temp)
    return export_response(temp, Format.XLS_2007, "domains")
Esempio n. 45
0
    def handle(self, domain, start_date, end_date, **options):
        """Count outbound ICDS-backend SMS per state and indicator over the
        given date range and write the totals to an xlsx file named after
        the range."""
        start_timestamp, end_timestamp = self.get_start_and_end_timestamps(start_date, end_date)
        # Caches used by the get_location / get_state_code lookups below.
        self.recipient_id_to_location_id = {}
        self.location_id_to_location = {}
        self.location_id_to_state_code = {}
        self.state_code_to_name = {'unknown': 'Unknown'}

        # {state_code: {indicator_slug: count}}
        data = {}

        filename = 'icds-sms-usage--%s--%s.xlsx' % (
            start_date.strftime('%Y-%m-%d'),
            end_date.strftime('%Y-%m-%d'),
        )

        for sms in SMS.objects.filter(
            domain=domain,
            date__gt=start_timestamp,
            date__lte=end_timestamp,
            backend_api=SQLICDSBackend.get_api_id(),
            direction='O',
            processed=True,
        ):
            location = self.get_location(sms)
            state_code = self.get_state_code(location)
            indicator_slug = self.get_indicator_slug(sms)
            # setdefault/get replaces the verbose two-step "init then +=".
            state_data = data.setdefault(state_code, {})
            state_data[indicator_slug] = state_data.get(indicator_slug, 0) + 1

        with open(filename, 'wb') as f:
            headers = ('State Code', 'State Name', 'Indicator', 'SMS Count')
            excel_data = [
                (state_code, self.state_code_to_name[state_code], indicator_slug, count)
                for state_code, state_data in data.items()
                for indicator_slug, count in state_data.items()
            ]

            export_raw(
                (('icds-sms-usage', headers), ),
                (('icds-sms-usage', excel_data), ),
                f
            )
Esempio n. 46
0
    def handle(self, domain, start_date, end_date, **options):
        """Count outbound Airtel-TCL-backend SMS per state and indicator over
        the given date range and write the totals to an xlsx file named after
        the range."""
        start_timestamp, end_timestamp = self.get_start_and_end_timestamps(start_date, end_date)
        # Caches used by the get_location / get_state_code lookups below.
        self.recipient_id_to_location_id = {}
        self.location_id_to_location = {}
        self.location_id_to_state_code = {}
        self.state_code_to_name = {'unknown': 'Unknown'}

        # {state_code: {indicator_slug: count}}
        data = {}

        filename = 'icds-sms-usage--%s--%s.xlsx' % (
            start_date.strftime('%Y-%m-%d'),
            end_date.strftime('%Y-%m-%d'),
        )

        for sms in SMS.objects.filter(
            domain=domain,
            date__gt=start_timestamp,
            date__lte=end_timestamp,
            backend_api=AirtelTCLBackend.get_api_id(),
            direction='O',
            processed=True,
        ):
            location = self.get_location(sms)
            state_code = self.get_state_code(location)
            if state_code not in data:
                data[state_code] = {}

            indicator_slug = self.get_indicator_slug(sms)
            if indicator_slug not in data[state_code]:
                data[state_code][indicator_slug] = 0

            data[state_code][indicator_slug] += 1

        with open(filename, 'wb') as f:
            headers = ('State Code', 'State Name', 'Indicator', 'SMS Count')
            excel_data = []

            # Flatten the nested counts into one row per (state, indicator).
            for state_code, state_data in data.items():
                for indicator_slug, count in state_data.items():
                    excel_data.append((state_code, self.state_code_to_name[state_code], indicator_slug, count))

            export_raw(
                (('icds-sms-usage', headers), ),
                (('icds-sms-usage', excel_data), ),
                f
            )
Esempio n. 47
0
def domain_list_download(request):
    """Export basic properties of every domain as a single-sheet Excel file."""
    properties = ("name", "city", "countries", "region", "project_type",
                  "customer_type", "is_test?")

    def _prop(domain, prop):
        # A trailing "?" marks a boolean property rendered via yesno().
        if prop.endswith("?"):
            return yesno(getattr(domain, prop[:-1], ""))
        return getattr(domain, prop, "")

    def _row(domain):
        return (_prop(domain, prop) for prop in properties)

    domains = Domain.get_all()
    outfile = StringIO()
    data = (("domains", (_row(domain) for domain in domains)),)
    export_raw((("domains", properties),), data, outfile)
    return export_response(outfile, Format.XLS_2007, "domains")
Esempio n. 48
0
def export_all_form_metadata(req, domain):
    """
    Export metadata for _all_ forms in a domain.
    """
    format = req.GET.get("format", Format.XLS_2007)

    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _key_to_val(formdata, key):
        # "type" is derived from the xmlns; every other key is an attribute.
        if key == "type":
            return xmlns_to_name(domain, formdata.xmlns, app_id=None)
        return getattr(formdata, key)

    def _form_data_to_row(formdata):
        return [_key_to_val(formdata, key) for key in headers]

    temp = StringIO()
    data = (_form_data_to_row(f) for f in HQFormData.objects.filter(domain=domain))
    export_raw((("forms", headers),), (("forms", data),), temp)
    return export_response(temp, format, "%s_forms" % domain)
Esempio n. 49
0
def download_sms_translations(request, domain):
    """Download the domain's SMS translations as a single-sheet Excel file."""
    tdoc = StandaloneTranslationDoc.get_obj(domain, "sms")
    columns = ["property"] + tdoc.langs + ["default"]

    # Build each row in one pass: id, each language's translation, default.
    rows = []
    for msg_id in sorted(_MESSAGES.keys()):
        row = [msg_id]
        row.extend(tdoc.translations[lang].get(msg_id, "") for lang in tdoc.langs)
        row.append(_MESSAGES.get(msg_id))
        rows.append(row)

    temp = StringIO()
    export_raw(
        (("translations", tuple(columns)),),
        (("translations", tuple(rows)),),
        temp,
    )
    return export_response(temp, Format.XLS_2007, "translations")
Esempio n. 50
0
def prepare_fixture_download(table_ids, domain, task, download_id):
    """Prepare fixture data for Excel download
    """
    data_types_book, excel_sheets = _prepare_fixture(table_ids, domain, task=task)

    # The "types" sheet always comes first, then one sheet per data type.
    headers = [("types", excel_sheets["types"]["headers"])]
    values = [("types", excel_sheets["types"]["rows"])]
    for data_type in data_types_book:
        sheet = excel_sheets[data_type.tag]
        headers.append((data_type.tag, sheet["headers"]))
        values.append((data_type.tag, sheet["rows"]))

    format = Format.XLS_2007
    outfile = StringIO()
    export_raw(tuple(headers), tuple(values), outfile, format)
    return expose_cached_download(
        outfile.getvalue(),
        60 * 60 * 2,
        mimetype=Format.from_format(format).mimetype,
        content_disposition='attachment; filename="%s_fixtures.xlsx"' % domain,
        download_id=download_id,
    )
Esempio n. 51
0
    def test_export_raw(self):
        """export_raw and export_from_tables should emit equivalent JSON for
        tuples, lists, and generators of (sheet, rows) data."""
        headers = (('people', ('name', 'gender')), ('offices', ('location',
                                                                'name')))
        data = (('people', [('danny', 'male'), ('amelia', 'female'),
                            ('carter', 'various')]),
                ('offices', [('Delhi, India', 'DSI'),
                             ('Boston, USA', 'Dimagi, Inc'),
                             ('Capetown, South Africa', 'DSA')]))
        # Expected parsed JSON output, shared by every export variant below.
        EXPECTED = {
            "offices": {
                "headers": ["location", "name"],
                "rows": [["Delhi, India", "DSI"],
                         ["Boston, USA", "Dimagi, Inc"],
                         ["Capetown, South Africa", "DSA"]]
            },
            "people": {
                "headers": ["name", "gender"],
                "rows": [["danny", "male"], ["amelia", "female"],
                         ["carter", "various"]]
            }
        }

        that = self

        # Context manager that collects exported bytes and, on a clean exit,
        # asserts the JSON output matches EXPECTED.
        class Tester(object):
            def __enter__(self):
                self.buffer = BytesIO()
                return self.buffer

            def __exit__(self, exc_type, exc_val, exc_tb):
                # Only compare output when the body didn't raise.
                if exc_type is None:
                    that.assertDictEqual(json.loads(self.buffer.getvalue()),
                                         EXPECTED)
                self.buffer.close()

        with Tester() as buffer:
            export_raw(headers, data, buffer, format=Format.JSON)

        with Tester() as buffer:
            # test lists
            export_raw(list(headers), list(data), buffer, format=Format.JSON)

        with Tester() as buffer:
            # test generators
            export_raw((h for h in headers),
                       ((name, (r for r in rows)) for name, rows in data),
                       buffer,
                       format=Format.JSON)

        with Tester() as buffer:
            # test export_from_tables
            # NOTE: headers/data are rebound to dicts here, so this must stay
            # the last variant in the test.
            headers = dict(headers)
            data = dict(data)
            tables = {}
            for key in set(headers.keys()) | set(data.keys()):
                tables[key] = itertools.chain([headers[key]], data[key])

            export_from_tables(list(tables.items()),
                               buffer,
                               format=Format.JSON)
Esempio n. 52
0
    def handle(self, domain, start_date, end_date, **options):
        """Write one xlsx per state containing the beneficiary SMS records
        sent within [start_date, end_date]; each file is named per state and
        date range."""
        # Caches populated during record/location lookups.
        self.recipient_details = {}
        self.location_details = {}
        start_timestamp, end_timestamp = self.get_start_and_end_timestamps(
            start_date, end_date)

        headers = (
            'Date (IST)',
            'Phone Number',
            'Recipient Name',
            'State Name',
            'District Name',
            'Block Name',
            'LS Name',
            'AWC Name',
            'Text',
        )

        for state_code in (
                ANDHRA_PRADESH_SITE_CODE,
                BIHAR_SITE_CODE,
                CHHATTISGARH_SITE_CODE,
                JHARKHAND_SITE_CODE,
                MADHYA_PRADESH_SITE_CODE,
                RAJASTHAN_SITE_CODE,
        ):
            export_name = self.get_export_name(state_code, start_date,
                                               end_date)
            with open('%s.xlsx' % export_name, 'wb') as f:
                records = self.get_records(
                    domain,
                    start_timestamp,
                    end_timestamp,
                    indicator_filter=['beneficiary_1', 'beneficiary_2'],
                    state_filter=[state_code])

                # Only the first 9 columns (through 'Text') are exported here.
                data = tuple(record[:9] for record in records)

                export_raw(((export_name, headers), ), ((export_name, data), ),
                           f)
Esempio n. 53
0
 def test_parser(self, location_type_mock, _):
     """Parser should record valid transitions and collect per-row errors."""
     type_codes = ['state', 'supervisor', 'awc']
     location_type_mock.return_value = [
         LocationType(code=code) for code in type_codes
     ]
     with tempfile.TemporaryFile() as upload:
         # Round-trip the fixture rows through a real xlsx workbook.
         export_raw(self.headers, self.rows, upload, format=Format.XLS_2007)
         upload.seek(0)
         parser = Parser(self.domain, get_workbook(upload))
         errors = parser.parse()
     self.assertEqual(parser.valid_transitions['awc']['Move'], {'131': '112'})
     self.assertEqual(parser.valid_transitions['awc']['Merge'], {'132': ['113', '114']})
     self.assertEqual(parser.valid_transitions['supervisor']['Move'], {'13': '12'})
     self.assertEqual(errors, [
         "No change in location code for Extract, got old: '111' and new: '111'",
         "New location 132 reused with different information",
         "Missing location code for Split, got old: '11' and new: ''",
         "Invalid Operation Unknown"
     ])
Esempio n. 54
0
    def handle(self, domain, start_date, end_date, **options):
        """Dump every matching SMS record for the date range to one xlsx file
        named after the range."""
        self.recipient_details = {}
        self.location_details = {}
        start_timestamp, end_timestamp = self.get_start_and_end_timestamps(start_date, end_date)

        filename = 'icds-sms-export--%s--%s.xlsx' % (
            start_date.strftime('%Y-%m-%d'),
            end_date.strftime('%Y-%m-%d'),
        )

        headers = (
            'Date (IST)',
            'Phone Number',
            'Recipient Name',
            'State Name',
            'District Name',
            'Block Name',
            'LS Name',
            'AWC Name',
            'Text',
            'Recipient Type',
            'Recipient Id',
            'Indicator',
            'State Id',
            'District Id',
            'Block Id',
            'LS Id',
            'AWC Id',
        )

        with open(filename, 'wb') as outfile:
            rows = tuple(self.get_records(domain, start_timestamp, end_timestamp))
            export_raw(
                (('icds-sms-export', headers), ),
                (('icds-sms-export', rows), ),
                outfile
            )
    def handle(self, domain, start_date, end_date, **options):
        """Write one xlsx per state containing the beneficiary SMS records
        sent within [start_date, end_date]; each file is named per state and
        date range."""
        # Caches populated during record/location lookups.
        self.recipient_details = {}
        self.location_details = {}
        start_timestamp, end_timestamp = self.get_start_and_end_timestamps(start_date, end_date)

        headers = (
            'Date (IST)',
            'Phone Number',
            'Recipient Name',
            'State Name',
            'District Name',
            'Block Name',
            'LS Name',
            'AWC Name',
            'Text',
        )

        for state_code in (
            ANDHRA_PRADESH_SITE_CODE,
            BIHAR_SITE_CODE,
            CHHATTISGARH_SITE_CODE,
            JHARKHAND_SITE_CODE,
            MADHYA_PRADESH_SITE_CODE,
            RAJASTHAN_SITE_CODE,
        ):
            export_name = self.get_export_name(state_code, start_date, end_date)
            with open('%s.xlsx' % export_name, 'wb') as f:
                records = self.get_records(domain, start_timestamp, end_timestamp,
                    indicator_filter=['beneficiary_1', 'beneficiary_2'], state_filter=[state_code])

                # Only the first 9 columns (through 'Text') are exported here.
                data = tuple(record[:9] for record in records)

                export_raw(
                    ((export_name, headers), ),
                    ((export_name, data), ),
                    f
                )
Esempio n. 56
0
    def test_export_raw(self):
        """export_raw and export_from_tables should emit equivalent JSON for
        tuples, lists, and generators of (sheet, rows) data."""
        headers = (('people', ('name', 'gender')), ('offices', ('location', 'name')))
        data = (
            ('people', [('danny', 'male'), ('amelia', 'female'), ('carter', 'various')]),
            ('offices', [('Delhi, India', 'DSI'), ('Boston, USA', 'Dimagi, Inc'), ('Capetown, South Africa', 'DSA')])
        )
        # Expected parsed JSON output, shared by every export variant below.
        EXPECTED = {"offices": {"headers": ["location", "name"], "rows": [["Delhi, India", "DSI"], ["Boston, USA", "Dimagi, Inc"], ["Capetown, South Africa", "DSA"]]}, "people": {"headers": ["name", "gender"], "rows": [["danny", "male"], ["amelia", "female"], ["carter", "various"]]}}

        that = self

        # Context manager that collects exported bytes and, on a clean exit,
        # asserts the JSON output matches EXPECTED.
        class Tester(object):

            def __enter__(self):
                self.buffer = BytesIO()
                return self.buffer

            def __exit__(self, exc_type, exc_val, exc_tb):
                # Only compare output when the body didn't raise.
                if exc_type is None:
                    that.assertDictEqual(json.loads(self.buffer.getvalue()), EXPECTED)
                self.buffer.close()

        with Tester() as buffer:
            export_raw(headers, data, buffer, format=Format.JSON)

        with Tester() as buffer:
            # test lists
            export_raw(list(headers), list(data), buffer, format=Format.JSON)

        with Tester() as buffer:
            # test generators
            export_raw((h for h in headers), ((name, (r for r in rows)) for name, rows in data), buffer, format=Format.JSON)
            
        with Tester() as buffer:
            # test export_from_tables
            # NOTE: headers/data are rebound to dicts here, so this must stay
            # the last variant in the test.
            headers = dict(headers)
            data = dict(data)
            tables = {}
            for key in set(headers.keys()) | set(data.keys()):
                tables[key] = itertools.chain([headers[key]], data[key])

            export_from_tables(list(tables.items()), buffer, format=Format.JSON)
Esempio n. 57
0
def download_item_lists(request, domain):
    """Export all fixture data types and their items for *domain* as Excel.

    Builds one "types" sheet listing every FixtureDataType (UID, delete
    flag, name, tag, is_global, then its field names) plus one sheet per
    type containing its items.  Group and user columns are right-padded so
    every row in a sheet has the same width.

    Example of the sheet structure handed to export_raw:

    headers:
     (("employee", ("id", "name", "gender")),
      ("building", ("id", "name", "address")))

    data:
     (("employee", (("1", "cory", "m"),
                    ("2", "christian", "m"),
                    ("3", "amelia", "f"))),
      ("building", (("1", "dimagi", "585 mass ave."),
                    ("2", "old dimagi", "529 main st."))))
    """
    data_types = FixtureDataType.by_domain(domain)
    data_type_schemas = []
    max_fields = 0      # widest "types" row seen so far
    mmax_groups = 0     # widest group column count across all types
    mmax_users = 0      # widest user column count across all types
    data_tables = []

    def _get_empty_list(length):
        # Padding cells used to right-fill short rows.
        return [""] * length

    # Fill each sheet's schema and data rows.
    for data_type in data_types:
        type_schema = [str(_id_from_doc(data_type)), "N", data_type.name,
                       data_type.tag, yesno(data_type.is_global)]
        fields = list(data_type.fields)
        type_id = data_type.get_id
        data_table_of_type = []

        # Materialize once instead of issuing the identical by_data_type
        # query twice (the original ran it once to measure and once to build).
        items = list(FixtureDataItem.by_data_type(domain, type_id))

        # First pass: find the widest group/user lists for this type.
        max_groups = 0
        max_users = 0
        for item_row in items:
            max_groups = max(max_groups, len(item_row.get_groups()))
            max_users = max(max_users, len(item_row.get_users()))

        # Second pass: build the padded data rows.
        for item_row in items:
            groups = ([group.name for group in item_row.get_groups()]
                      + _get_empty_list(max_groups - len(item_row.get_groups())))
            users = ([user.raw_username for user in item_row.get_users()]
                     + _get_empty_list(max_users - len(item_row.get_users())))
            data_row = tuple([str(_id_from_doc(item_row)), "N"] +
                             [item_row.fields.get(field, None) or "" for field in fields] +
                             groups + users)
            data_table_of_type.append(data_row)

        type_schema.extend(fields)
        data_type_schemas.append(tuple(type_schema))
        max_fields = max(max_fields, len(type_schema))
        data_tables.append((data_type.tag, tuple(data_table_of_type)))
        mmax_groups = max(mmax_groups, max_groups)
        mmax_users = max(mmax_users, max_users)

    # "types" sheet header: 5 fixed columns plus one per (widest) field list.
    type_headers = (["UID", DELETE_HEADER, "name", "tag", 'is_global?']
                    + ["field %d" % x for x in range(1, max_fields - 4)])
    table_headers = [("types", tuple(type_headers))]
    for type_schema in data_type_schemas:
        # type_schema[3] is the tag; type_schema[5:] are the field names.
        item_header = (type_schema[3], tuple(
            ["UID", DELETE_HEADER] +
            ["field: " + x for x in type_schema[5:]] +
            ["group %d" % x for x in range(1, mmax_groups + 1)] +
            ["user %d" % x for x in range(1, mmax_users + 1)]
        ))
        table_headers.append(item_header)

    table_headers = tuple(table_headers)
    type_rows = ("types", tuple(data_type_schemas))
    data_tables = tuple([type_rows] + data_tables)

    export_format = Format.XLS_2007
    fd, path = tempfile.mkstemp()
    # XLSX content is binary: open the handle in 'wb' (the original used
    # text mode 'w', which corrupts the workbook on Python 3) and pass the
    # format explicitly instead of relying on export_raw's default.
    with os.fdopen(fd, 'wb') as temp:
        export_raw(table_headers, data_tables, temp, format=export_format)
    # NOTE(review): the temp file is never deleted; export_response
    # presumably streams it -- consider cleanup once the response is sent.
    return export_response(open(path, 'rb'), export_format, "%s_fixtures" % domain)
 def get_worksheet(self, title):
     """Export self.headers / self.data to an in-memory XLS_2007 workbook
     and return the worksheet whose title is *title*."""
     buffer = StringIO()
     export_raw(self.headers, self.data, buffer, format=Format.XLS_2007)
     buffer.seek(0)
     # WorkbookJSONReader parses the stream in its constructor.
     reader = WorkbookJSONReader(buffer)
     return reader.worksheets_by_title[title]
Esempio n. 59
0
    def get(self, request, domain, app_id):
        """Return an XLS_2007 download summarizing the app.

        Produces one sheet (named after the app) with a top-level app row
        followed by one row per module and one row per form, with module and
        form names translated into the requested language (``lang`` query
        parameter, default ``'en'``).
        """
        language = request.GET.get('lang', 'en')
        headers = [(self.app.name, tuple(APP_SUMMARY_EXPORT_HEADER_NAMES))]
        data = [(self.app.name, [
            AppSummaryRow(
                app=self.app.name,
                comments=self.app.comment,
            )
        ])]

        for module in self.app.get_modules():
            # Some module types have no case list configuration.
            try:
                case_list_filter = module.case_details.short.filter
            except AttributeError:
                case_list_filter = None

            data += [
                (self.app.name, [
                    AppSummaryRow(
                        app=self.app.name,
                        module=_get_translated_module_name(self.app, module.unique_id, language),
                        display_filter=module.module_filter,
                        case_type=module.case_type,
                        case_list_filter=case_list_filter,
                        # NOTE(review): this repeats the case-list filter
                        # lookup under 'case_actions' -- looks like it may
                        # have been intended to be the module's case actions;
                        # preserved as-is pending confirmation.
                        case_actions=module.case_details.short.filter if hasattr(module, 'case_details') else None,
                        filter=module.module_filter,
                        module_type='advanced' if isinstance(module, AdvancedModule) else 'standard',
                        comments=module.comment,
                        parent_module=(_get_translated_module_name(self.app, module.root_module_id, language)
                                       if module.root_module_id else '')
                    )
                ])
            ]
            for form in module.get_forms():
                post_form_workflow = form.post_form_workflow
                if form.post_form_workflow == WORKFLOW_FORM:
                    # Render each form link as "form name: xpath [datums]".
                    post_form_workflow = "form:\n{}".format(
                        "\n".join(
                            ["{form}: {xpath} [{datums}]".format(
                                form=_get_translated_form_name(self.app, link.form_id, language),
                                xpath=link.xpath,
                                datums=", ".join(
                                    "{}: {}".format(
                                        datum.name, datum.xpath
                                    ) for datum in link.datums)
                            ) for link in form.form_links]
                        )
                    )
                data += [
                    (self.app.name, [
                        AppSummaryRow(
                            app=self.app.name,
                            module=_get_translated_module_name(self.app, module.unique_id, language),
                            form=_get_translated_form_name(self.app, form.get_unique_id(), language),
                            display_filter=form.form_filter,
                            case_type=form.get_case_type(),
                            case_actions=self._get_form_actions(form),
                            filter=form.form_filter,
                            module_type='advanced' if isinstance(module, AdvancedModule) else 'standard',
                            comments=form.comment,
                            end_of_form_navigation=post_form_workflow,
                        )
                    ])
                ]

        export_string = io.BytesIO()
        # Fixed: the original statement ended with a stray trailing comma,
        # wrapping the call in a discarded one-tuple; also pass the format
        # as a keyword for consistency with other export_raw call sites.
        export_raw(tuple(headers), data, export_string, format=Format.XLS_2007)
        return export_response(
            export_string,
            Format.XLS_2007,
            '{app_name} v.{app_version} - App Summary ({lang})'.format(
                app_name=self.app.name,
                app_version=self.app.version,
                lang=language
            ),
        )