def get(self, request, domain, app_id):
    """Export a case summary workbook for the app.

    Builds one "All Case Properties" sheet listing (case_type, case_property)
    rows, followed by one sheet per case type with question-level detail, and
    responds with an XLSX download named after the app, version and language.
    """
    case_metadata = self.app.get_case_metadata()
    language = request.GET.get("lang", "en")

    # Sheet headers: the summary sheet first, then one sheet per case type.
    headers = [("All Case Properties", ("case_type", "case_property"))]
    headers += [
        (case_type.name, tuple(CASE_SUMMARY_EXPORT_HEADER_NAMES))
        for case_type in case_metadata.case_types
    ]

    # Rows are keyed by sheet name; repeated "All Case Properties" entries
    # accumulate onto the single summary sheet.
    data = [
        ("All Case Properties", self.get_case_property_rows(case_type))
        for case_type in case_metadata.case_types
    ]
    data += [
        (case_type.name, self.get_case_questions_rows(case_type, language))
        for case_type in case_metadata.case_types
    ]

    export_string = StringIO()
    export_raw(tuple(headers), data, export_string, Format.XLS_2007)
    return export_response(
        export_string,
        Format.XLS_2007,
        u"{app_name} v.{app_version} - Case Summary ({lang})".format(
            app_name=self.app.name,
            app_version=self.app.version,
            lang=language,
        ),
    )
def upload_raw_excel_translations(self, excel_headers, excel_data, expected_messages=None):
    """Build a bulk app translation workbook in memory and run it through the uploader.

    ``excel_headers`` / ``excel_data`` use the (sheet_name, rows) tuple
    structure accepted by ``export_raw``, e.g.::

        excel_headers:
            (("employee", ("id", "name", "gender")),
             ("building", ("id", "name", "address")))

        excel_data:
            (("employee", (("1", "cory", "m"),
                           ("2", "christian", "m"),
                           ("3", "amelia", "f"))),
             ("building", (("1", "dimagi", "585 mass ave."),
                           ("2", "old dimagi", "529 main st."))))

    Asserts that the upload produced exactly ``expected_messages``.
    """
    if not expected_messages:
        expected_messages = ["App Translations Updated!"]

    workbook_buffer = StringIO()
    export_raw(excel_headers, excel_data, workbook_buffer, format=Format.XLS_2007)
    with tempfile.TemporaryFile(suffix='.xlsx') as upload_file:
        upload_file.write(workbook_buffer.getvalue())
        upload_messages = process_bulk_app_translation_upload(self.app, upload_file)
    self.assertListEqual([msg[1] for msg in upload_messages], expected_messages)
def _export_pis(report, report_slug, non_data_cols=3):
    """Export a performance-indicator report as a single-sheet xlsx response.

    HACK (inherited): assumes the first ``non_data_cols`` columns are
    identifiers (clinic, year, month) and every remaining cell is either
    "N/A" or a fraction rendered like "pct (num/denom)", which gets split
    into three columns: num, denom, pct.
    """
    context = report.get_data(include_urls=False)

    def _expand_headings(headings, fixed):
        # Keep the identifier headings, triple every data heading.
        expanded = headings[:fixed]
        for heading in headings[fixed:]:
            expanded.append("%s num" % heading)
            expanded.append("%s denom" % heading)
            expanded.append("%s pct" % heading)
        return expanded

    def _expand_row(values, fixed):
        expanded = values[:fixed]
        for cell in values[fixed:]:
            if cell == "N/A":
                expanded.extend(["N/A"] * 3)
            else:
                # Replace "(", "/" and ")" with spaces so the
                # "pct (num/denom)" string splits into three tokens.
                for special in "(/)":
                    cell = cell.replace(special, " ")
                pct, num, denom = cell.split()
                expanded.extend([num, denom, pct])
        return expanded

    temp = StringIO()
    export_raw(
        (("data", _expand_headings(context["headings"], non_data_cols)),),
        (("data", [_expand_row(row, non_data_cols) for row in context["rows"]]),),
        temp,
    )
    return export_response(temp, "xlsx", report_slug)
def get(self, request, domain, app_id):
    """Export a form summary workbook for the app.

    Builds one "All Forms" overview sheet (module/form/comment rows) plus
    one sheet per form with question-level detail, and responds with an
    XLSX download named after the app, version and language.
    """
    language = request.GET.get("lang", "en")
    modules = list(self.app.get_modules())

    headers = [("All Forms", ("module_name", "form_name", "comment"))]
    headers += [
        (self._get_form_sheet_name(module, form, language),
         tuple(FORM_SUMMARY_EXPORT_HEADER_NAMES))
        for module in modules for form in module.get_forms()
    ]

    # Rows are keyed by sheet name; repeated "All Forms" entries accumulate
    # onto the single overview sheet.
    data = [
        ("All Forms", self.get_all_forms_row(module, form, language))
        for module in modules for form in module.get_forms()
    ]
    data += [
        (self._get_form_sheet_name(module, form, language),
         self._get_form_row(form, language))
        for module in modules for form in module.get_forms()
    ]

    export_string = StringIO()
    export_raw(tuple(headers), data, export_string, Format.XLS_2007)
    return export_response(
        export_string,
        Format.XLS_2007,
        u"{app_name} v.{app_version} - Form Summary ({lang})".format(
            app_name=self.app.name,
            app_version=self.app.version,
            lang=language,
        ),
    )
def get(self, request, domain, app_id):
    """Export a form summary workbook for the app.

    Builds one 'All Forms' overview sheet (module/form/comment plus the
    module and form display conditions), then one sheet per form with
    question-level detail. Responds with an XLSX download named after
    the app, version and language.
    """
    language = request.GET.get('lang', 'en')
    modules = list(self.app.get_modules())
    case_meta = self.app.get_case_metadata()

    headers = [(_('All Forms'),
                ('module_name', 'form_name', 'comment',
                 'module_display_condition', 'form_display_condition'))]
    headers += [
        (self._get_form_sheet_name(module, form, language),
         tuple(FORM_SUMMARY_EXPORT_HEADER_NAMES))
        for module in modules for form in module.get_forms()
    ]

    # Rows are keyed by sheet name; repeated 'All Forms' entries accumulate
    # onto the single overview sheet.
    data = [
        (_('All Forms'), self.get_all_forms_row(module, form, language))
        for module in modules for form in module.get_forms()
    ]
    data += [
        (self._get_form_sheet_name(module, form, language),
         self._get_form_row(form, language, case_meta))
        for module in modules for form in module.get_forms()
    ]

    export_string = io.BytesIO()
    export_raw(tuple(headers), data, export_string, Format.XLS_2007)
    return export_response(
        export_string,
        Format.XLS_2007,
        '{app_name} v.{app_version} - Form Summary ({lang})'.format(
            app_name=self.app.name,
            app_version=self.app.version,
            lang=language
        ),
    )
def save_metadata_export_to_tempfile(domain):
    """
    Saves the domain's form metadata to a file. Returns the filename.
    """
    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _row_for(formdata):
        def _value(key):
            # "type" and "version" are derived; a few keys live at the top
            # level of the doc and the rest come from form.meta.
            if key == "type":
                return xmlns_to_name(domain, formdata.get("xmlns"), app_id=None)
            if key == "version":
                return formdata["form"].get("@version")
            if key in ["domain", "received_on", "xmlns"]:
                return formdata.get(key)
            return formdata["form"].get("meta", {}).get(key)
        return [_value(key) for key in headers]

    fd, path = tempfile.mkstemp()
    query = {
        "query": {"match_all": {}},
        "sort": [{"received_on": {"order": "desc"}}],
    }
    results = stream_es_query(params={"domain.exact": domain}, q=query,
                              es_url=XFORM_INDEX + '/xform/_search',
                              size=999999)
    rows = (_row_for(hit["_source"]) for hit in results)
    with os.fdopen(fd, 'w') as temp:
        export_raw((("forms", headers),), (("forms", rows),), temp)
    return path
def export_all_form_metadata(req, domain):
    """
    Export metadata for _all_ forms in a domain.
    """
    format = req.GET.get("format", Format.XLS_2007)
    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _row_for(formdata):
        def _value(key):
            # "type" is derived from the xmlns; every other column is a
            # plain attribute on the HQFormData row.
            if key == "type":
                return xmlns_to_name(domain, formdata.xmlns, app_id=None)
            return getattr(formdata, key)
        return [_value(key) for key in headers]

    fd, path = tempfile.mkstemp()
    form_stream = stream_qs(
        HQFormData.objects.filter(domain=domain).order_by('received_on')
    )
    rows = (_row_for(form) for form in form_stream)
    with os.fdopen(fd, 'w') as temp:
        export_raw((("forms", headers),), (("forms", rows),), temp)
    return export_response(open(path), format, "%s_forms" % domain)
def get(self, request, domain, app_id):
    """Export a case summary workbook for the app.

    Sheets: 'All Case Properties' (type/property/description), 'Case Types'
    (relationships and opened_by/closed_by), then one sheet per case type
    with question-level detail. Responds with an XLSX download.
    """
    case_metadata = self.app.get_case_metadata()
    language = request.GET.get('lang', 'en')

    headers = [
        (_('All Case Properties'), ('case_type', 'case_property', 'description')),
        (_('Case Types'), ('type', 'relationships', 'opened_by', 'closed_by')),
    ]
    headers += [
        (case_type.name, tuple(CASE_SUMMARY_EXPORT_HEADER_NAMES))
        for case_type in case_metadata.case_types
    ]

    # Rows are keyed by sheet name; repeated 'All Case Properties' entries
    # accumulate onto the single summary sheet.
    data = [
        (_('All Case Properties'), self.get_case_property_rows(case_type))
        for case_type in case_metadata.case_types
    ]
    data += [self.get_case_type_rows(case_metadata.case_types, language)]
    data += [
        (case_type.name, self.get_case_questions_rows(case_type, language))
        for case_type in case_metadata.case_types
    ]

    export_string = io.BytesIO()
    export_raw(tuple(headers), data, export_string, Format.XLS_2007)
    return export_response(
        export_string,
        Format.XLS_2007,
        '{app_name} v.{app_version} - Case Summary ({lang})'.format(
            app_name=self.app.name,
            app_version=self.app.version,
            lang=language
        ),
    )
def _get_workbook_from_data(self, headers, rows):
    """Round-trip headers/rows through an XLS 2007 export and parse it back."""
    exported = BytesIO()
    export_raw(headers, rows, exported, format=Format.XLS_2007)
    with tempfile.TemporaryFile(suffix='.xlsx') as workbook_file:
        workbook_file.write(exported.getvalue())
        # Rewind so get_workbook reads from the start of the file.
        workbook_file.seek(0)
        return get_workbook(workbook_file)
def download_bulk_app_translations(request, domain, app_id):
    """Serve the app's bulk translation sheets as an XLS 2007 download."""
    app = get_app(domain, app_id)
    headers = expected_bulk_app_sheet_headers(app)
    sheet_rows = expected_bulk_app_sheet_rows(app)
    outfile = StringIO()
    export_raw(headers, list(sheet_rows.iteritems()), outfile)
    return export_response(outfile, Format.XLS_2007, "bulk_app_translations")
def download_item_lists(request, domain):
    # Export every fixture data type and its items for a domain as an
    # Excel workbook: one "types" sheet describing each type, plus one
    # sheet of item rows per type.
    data_types = FixtureDataType.by_domain(domain)
    data_type_schemas = []
    max_fields = 0
    max_groups = 0
    max_users = 0
    mmax_groups = 0
    mmax_users = 0
    data_tables = []
    for data_type in data_types:
        type_schema = [data_type.name, data_type.tag]
        fields = [field for field in data_type.fields]
        type_id = data_type.get_id
        data_table_of_type = []
        # First pass over the items: find the widest group/user membership
        # for this type so every row can be padded to the same width.
        for item_row in FixtureDataItem.by_data_type(domain, type_id):
            group_len = len(item_row.get_groups())
            max_groups = group_len if group_len>max_groups else max_groups
            user_len = len(item_row.get_users())
            max_users = user_len if user_len>max_users else max_users
        # Second pass: emit one padded row per fixture item.
        for item_row in FixtureDataItem.by_data_type(domain, type_id):
            groups = [group.name for group in item_row.get_groups()] + ["" for x in range(0,max_groups-len(item_row.get_groups()))]
            users = [user.raw_username for user in item_row.get_users()] + ["" for x in range(0, max_users-len(item_row.get_users()))]
            data_row = tuple([str(_id_from_doc(item_row)),"N"]+ [item_row.fields[field] for field in fields]+ groups + users)
            data_table_of_type.append(data_row)
        type_schema.extend(fields)
        data_type_schemas.append(tuple(type_schema))
        if max_fields<len(type_schema):
            max_fields = len(type_schema)
        data_tables.append((data_type.tag,tuple(data_table_of_type)))
        # Track the global maxima across all types, then reset the
        # per-type counters for the next iteration.
        mmax_users = max_users if max_users>mmax_users else mmax_users
        mmax_groups = max_groups if max_groups>mmax_groups else mmax_groups
        max_users = 0
        max_groups = 0
    # Build the "types" sheet header (name, tag, field 1..n), then one
    # header per item sheet padded to the widest group/user counts.
    type_headers = ["name", "tag"] + ["field %d" % x for x in range(1, max_fields - 1)]
    type_headers = ("types", tuple(type_headers))
    table_headers = [type_headers]
    for type_schema in data_type_schemas:
        item_header = (type_schema[1], tuple(["UID", DELETE_HEADER] + ["field: " + x for x in type_schema[2:]] + ["group %d" % x for x in range(1, mmax_groups + 1)] + ["user %d" % x for x in range(1, mmax_users + 1)]))
        table_headers.append(item_header)
    table_headers = tuple(table_headers)
    type_rows = ("types", tuple(data_type_schemas))
    data_tables = tuple([type_rows]+data_tables)
    # Write everything to a temp file and stream it back as a download.
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as temp:
        export_raw((table_headers), (data_tables), temp)
    format = Format.XLS_2007
    return export_response(open(path), format, "%s_fixtures" % domain)
def download_bulk_app_translations(request, domain, app_id):
    """Serve the app's bulk translation sheets as an XLS 2007 download."""
    app = get_app(domain, app_id)
    headers = expected_bulk_app_sheet_headers(app)
    sheet_rows = expected_bulk_app_sheet_rows(app)
    outfile = io.BytesIO()
    export_raw(headers, list(six.iteritems(sheet_rows)), outfile)
    filename = '{app_name} v.{app_version} - App Translations'.format(
        app_name=app.name, app_version=app.version)
    return export_response(outfile, Format.XLS_2007, filename)
def setUpClass(cls):
    """Load the test app and precompute the workbook the upload should produce."""
    cls.app = Application.wrap(cls.get_json("app"))
    # Todo, refactor this into BulkAppTranslationTestBase.upload_raw_excel_translations
    exported = StringIO()
    export_raw(cls.excel_headers, cls.excel_data, exported, format=Format.XLS_2007)
    with tempfile.TemporaryFile(suffix='.xlsx') as f:
        f.write(exported.getvalue())
        wb_reader = WorkbookJSONReader(f)
        cls.expected_workbook = [
            {'name': sheet.title, 'rows': list(sheet)}
            for sheet in wb_reader.worksheets
        ]
def _build_translation_download_file(self, headers, data=None):
    """Assemble a single-sheet "translations" export file in memory.

    Note: if ``data`` is supplied, the default translation rows are
    appended to it in place (matching the original behavior).
    """
    if data is None:
        data = []
    translations = get_default_translations_for_download(self.app, 'latest')
    for key, value in six.iteritems(translations):
        data.append((key, value))
    sheet_data = (('translations', tuple(data)),)
    outfile = BytesIO()
    export_raw(headers, sheet_data, outfile)
    outfile.seek(0)  # .read() is used somewhere so this needs to be at the beginning
    return outfile
def download_multimedia_paths(request, domain, app_id):
    """Download a sheet of the app's multimedia paths (old path, new path, usages)."""
    from corehq.apps.hqmedia.view_helpers import download_multimedia_paths_rows
    app = get_app(domain, app_id)
    headers = ((_("Paths"), (_("Old Path"), _("New Path"), _("Usages"))),)
    only_missing = request.GET.get('only_missing', False)
    rows = download_multimedia_paths_rows(app, only_missing=only_missing)
    outfile = io.BytesIO()
    export_raw(headers, rows, outfile)
    filename = '{app_name} v.{app_version} - App Multimedia Paths'.format(
        app_name=app.name, app_version=app.version)
    return export_response(outfile, Format.XLS_2007, filename)
def download_bulk_app_translations(request, domain, app_id):
    """Download translation sheets for the app.

    With ?lang= a single combined sheet for that language is produced;
    otherwise one sheet per module/form.
    """
    lang = request.GET.get('lang')
    app = get_app(domain, app_id)
    headers = get_bulk_app_sheet_headers(app, lang=lang)
    if lang:
        sheets = get_bulk_app_single_sheet_by_name(app, lang)
    else:
        sheets = get_bulk_app_sheets_by_name(app)
    outfile = io.BytesIO()
    export_raw(headers, list(six.iteritems(sheets)), outfile)
    filename = '{app_name} v.{app_version} - App Translations{lang}'.format(
        app_name=app.name,
        app_version=app.version,
        lang=' ' + lang if lang else '')
    return export_response(outfile, Format.XLS_2007, filename)
def build_ui_translation_download_file(app):
    # Build an in-memory "translations" sheet for the app's UI strings:
    # one row per property, one column per app language, plus a trailing
    # "platform" column derived from the string's source module.
    properties = tuple(["property"] + app.langs + ["platform"])
    temp = StringIO()
    headers = (("translations", properties),)
    row_dict = {}
    for i, lang in enumerate(app.langs):
        index = i + 1
        trans_dict = app.translations.get(lang, {})
        for prop, trans in trans_dict.iteritems():
            if prop not in row_dict:
                row_dict[prop] = [prop]
            # Pad with blanks so this translation lands in its own
            # language's column even when earlier languages had no entry.
            num_to_fill = index - len(row_dict[prop])
            row_dict[prop].extend(["" for i in range(num_to_fill)] if num_to_fill > 0 else [])
            row_dict[prop].append(trans)
    rows = row_dict.values()
    all_prop_trans = get_default_translations_for_download(app)
    # Also list default-translatable properties the app hasn't customized
    # yet, so they appear as (mostly empty) rows to fill in.
    rows.extend([[t] for t in sorted(all_prop_trans.keys()) if t not in row_dict])

    def fillrow(row):
        # Pad the row out to the full column count.
        num_to_fill = len(properties) - len(row)
        row.extend(["" for i in range(num_to_fill)] if num_to_fill > 0 else [])
        return row

    def add_default(row):
        # Fill the default-language column from the stock translations
        # when the app has no custom value there.
        row_index = get_index_for_defaults(app.langs) + 1
        if not row[row_index]:
            # If no custom translation exists, replace it.
            row[row_index] = all_prop_trans.get(row[0], "")
        return row

    def add_sources(row):
        # Map the string's source module to a platform label for the
        # final "platform" column.
        platform_map = {
            "CommCareAndroid": "Android",
            "CommCareJava": "Java",
            "ODK": "Android",
            "JavaRosa": "Java",
        }
        source = system_text_sources.SOURCES.get(row[0], "")
        row[-1] = platform_map.get(source, "")
        return row

    rows = [add_sources(add_default(fillrow(row))) for row in rows]
    data = (("translations", tuple(rows)),)
    export_raw(headers, data, temp)
    return temp
def get(self, request, domain):
    """Download the domain's conditional alerts as a one-sheet XLSX file."""
    title = _("Conditional Alerts")
    headers = ((title, (_('id'), _('name'), _('case_type'))),)
    rows = []
    for rule in self.get_conditional_alerts_queryset():
        rows.append((rule.pk, rule.name, rule.case_type))
    outfile = io.BytesIO()
    export_raw(headers, [(title, rows)], outfile)
    filename = '{title} - {domain}'.format(
        domain=domain, title=title)
    return export_response(outfile, Format.XLS_2007, filename)
def save_metadata_export_to_tempfile(domain, datespan=None, user_ids=None):
    """
    Saves the domain's form metadata to a file. Returns the filename.
    """
    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _form_data_to_row(formdata):
        def _key_to_val(formdata, key):
            # "type" and "version" are derived; a few keys live at the top
            # level of the ES doc, the rest come from form.meta.
            if key == "type":
                return xmlns_to_name(domain, formdata.get("xmlns"), app_id=None)
            if key == "version":
                return formdata["form"].get("@version")
            if key in ["domain", "received_on", "xmlns"]:
                return formdata.get(key)
            return formdata["form"].get("meta", {}).get(key)
        return [_key_to_val(formdata, key) for key in headers]

    fd, path = tempfile.mkstemp()
    q = {
        "query": {"match_all": {}},
        "sort": [{"received_on" : {"order": "desc"}}],
        "filter": {"and": []},
    }
    if datespan:
        # Replace match_all with a range on the form's end time
        # (upper bound exclusive).
        q["query"] = {
            "range": {
                "form.meta.timeEnd": {
                    "from": datespan.startdate_param,
                    "to": datespan.enddate_param,
                    "include_upper": False,
                }
            }
        }
    if user_ids is not None:
        q["filter"]["and"].append({"terms": {"form.meta.userID": user_ids}})
    results = stream_es_query(params={"domain.exact": domain}, q=q,
                              es_url=XFORM_INDEX + '/xform/_search',
                              size=999999)
    data = (_form_data_to_row(res["_source"]) for res in results)
    with os.fdopen(fd, 'w') as temp:
        export_raw((("forms", headers),), (("forms", data),), temp)
    return path
def domain_list_download(request):
    """Download a one-sheet summary of every domain's basic properties."""
    properties = ("name", "city", "country", "region", "project_type",
                  "customer_type", "is_test?")

    def _cell(domain, prop):
        # A trailing "?" marks a boolean attribute rendered via yesno.
        if prop.endswith("?"):
            return yesno(getattr(domain, prop[:-1], ""))
        return getattr(domain, prop, "")

    rows = ((_cell(domain, prop) for prop in properties)
            for domain in Domain.get_all())
    outfile = StringIO()
    export_raw((("domains", properties),), (("domains", rows),), outfile)
    return export_response(outfile, Format.XLS_2007, "domains")
def handle(self, domain, start_date, end_date, **options):
    """Tally outbound Airtel TCL SMS by state and indicator, write to xlsx."""
    start_timestamp, end_timestamp = self.get_start_and_end_timestamps(start_date, end_date)
    self.recipient_id_to_location_id = {}
    self.location_id_to_location = {}
    self.location_id_to_state_code = {}
    self.state_code_to_name = {'unknown': 'Unknown'}
    counts = {}
    filename = 'icds-sms-usage--%s--%s.xlsx' % (
        start_date.strftime('%Y-%m-%d'),
        end_date.strftime('%Y-%m-%d'),
    )
    queryset = SMS.objects.filter(
        domain=domain,
        date__gt=start_timestamp,
        date__lte=end_timestamp,
        backend_api=AirtelTCLBackend.get_api_id(),
        direction='O',
        processed=True,
    )
    for sms in queryset:
        state_code = self.get_state_code(self.get_location(sms))
        indicator_slug = self.get_indicator_slug(sms)
        state_counts = counts.setdefault(state_code, {})
        state_counts[indicator_slug] = state_counts.get(indicator_slug, 0) + 1
    with open(filename, 'wb') as f:
        headers = ('State Code', 'State Name', 'Indicator', 'SMS Count')
        excel_rows = [
            (state_code, self.state_code_to_name[state_code], slug, count)
            for state_code, state_data in counts.items()
            for slug, count in state_data.items()
        ]
        export_raw(
            (('icds-sms-usage', headers), ),
            (('icds-sms-usage', excel_rows), ),
            f
        )
def export_all_form_metadata(req, domain):
    """
    Export metadata for _all_ forms in a domain.
    """
    format = req.GET.get("format", Format.XLS_2007)
    headers = ("domain", "instanceID", "received_on", "type",
               "timeStart", "timeEnd", "deviceID", "username",
               "userID", "xmlns", "version")

    def _row_for(formdata):
        def _value(key):
            # "type" is derived from the xmlns; every other column is a
            # plain attribute on the HQFormData row.
            if key == "type":
                return xmlns_to_name(domain, formdata.xmlns, app_id=None)
            return getattr(formdata, key)
        return [_value(key) for key in headers]

    outfile = StringIO()
    rows = (_row_for(form) for form in HQFormData.objects.filter(domain=domain))
    export_raw((("forms", headers),), (("forms", rows),), outfile)
    return export_response(outfile, format, "%s_forms" % domain)
def download_sms_translations(request, domain):
    """Download the domain's SMS translations as a single-sheet workbook.

    Columns: property, one per domain language, then the default message.
    """
    tdoc = StandaloneTranslationDoc.get_obj(domain, "sms")
    columns = ["property"] + tdoc.langs + ["default"]
    rows = []
    for msg_id in sorted(_MESSAGES.keys()):
        row = [msg_id]
        for lang in tdoc.langs:
            row.append(tdoc.translations[lang].get(msg_id, ""))
        row.append(_MESSAGES.get(msg_id))
        rows.append(row)
    temp = StringIO()
    export_raw((("translations", tuple(columns)),),
               (("translations", tuple(rows)),), temp)
    return export_response(temp, Format.XLS_2007, "translations")
def prepare_fixture_download(table_ids, domain, task, download_id):
    """Prepare fixture data for Excel download
    """
    data_types_book, excel_sheets = _prepare_fixture(table_ids, domain, task=task)

    # Sheet order: the "types" sheet first, then one sheet per fixture tag.
    headers = [("types", excel_sheets["types"]["headers"])]
    rows = [("types", excel_sheets["types"]["rows"])]
    for data_type in data_types_book:
        sheet = excel_sheets[data_type.tag]
        headers.append((data_type.tag, sheet["headers"]))
        rows.append((data_type.tag, sheet["rows"]))

    outfile = StringIO()
    format = Format.XLS_2007
    export_raw(tuple(headers), tuple(rows), outfile, format)
    return expose_cached_download(
        outfile.getvalue(),
        60 * 60 * 2,
        mimetype=Format.from_format(format).mimetype,
        content_disposition='attachment; filename="%s_fixtures.xlsx"' % domain,
        download_id=download_id,
    )
def handle(self, domain, start_date, end_date, **options):
    """Write a detailed export of ICDS SMS records for the date range to xlsx."""
    self.recipient_details = {}
    self.location_details = {}
    start_timestamp, end_timestamp = self.get_start_and_end_timestamps(start_date, end_date)
    filename = 'icds-sms-export--%s--%s.xlsx' % (
        start_date.strftime('%Y-%m-%d'),
        end_date.strftime('%Y-%m-%d'),
    )
    headers = (
        'Date (IST)',
        'Phone Number',
        'Recipient Name',
        'State Name',
        'District Name',
        'Block Name',
        'LS Name',
        'AWC Name',
        'Text',
        'Recipient Type',
        'Recipient Id',
        'Indicator',
        'State Id',
        'District Id',
        'Block Id',
        'LS Id',
        'AWC Id',
    )
    rows = tuple(self.get_records(domain, start_timestamp, end_timestamp))
    with open(filename, 'wb') as f:
        export_raw(
            (('icds-sms-export', headers), ),
            (('icds-sms-export', rows), ),
            f
        )
def handle(self, domain, start_date, end_date, **options):
    """Write per-state beneficiary SMS exports, one xlsx file per listed state."""
    self.recipient_details = {}
    self.location_details = {}
    start_timestamp, end_timestamp = self.get_start_and_end_timestamps(start_date, end_date)
    headers = (
        'Date (IST)',
        'Phone Number',
        'Recipient Name',
        'State Name',
        'District Name',
        'Block Name',
        'LS Name',
        'AWC Name',
        'Text',
    )
    state_codes = (
        ANDHRA_PRADESH_SITE_CODE,
        BIHAR_SITE_CODE,
        CHHATTISGARH_SITE_CODE,
        JHARKHAND_SITE_CODE,
        MADHYA_PRADESH_SITE_CODE,
        RAJASTHAN_SITE_CODE,
    )
    for state_code in state_codes:
        export_name = self.get_export_name(state_code, start_date, end_date)
        records = self.get_records(
            domain, start_timestamp, end_timestamp,
            indicator_filter=['beneficiary_1', 'beneficiary_2'],
            state_filter=[state_code])
        # Keep only the first nine (header-covered) columns of each record.
        rows = tuple(record[:9] for record in records)
        with open('%s.xlsx' % export_name, 'wb') as f:
            export_raw(
                ((export_name, headers), ),
                ((export_name, rows), ),
                f
            )
def test_export_raw(self):
    # Exercise export_raw / export_from_tables with tuples, lists,
    # generators and dicts, asserting identical JSON output each time.
    headers = (('people', ('name', 'gender')), ('offices', ('location', 'name')))
    data = (
        ('people', [('danny', 'male'), ('amelia', 'female'), ('carter', 'various')]),
        ('offices', [('Delhi, India', 'DSI'), ('Boston, USA', 'Dimagi, Inc'), ('Capetown, South Africa', 'DSA')])
    )
    EXPECTED = {"offices": {"headers": ["location", "name"], "rows": [["Delhi, India", "DSI"], ["Boston, USA", "Dimagi, Inc"], ["Capetown, South Africa", "DSA"]]}, "people": {"headers": ["name", "gender"], "rows": [["danny", "male"], ["amelia", "female"], ["carter", "various"]]}}

    that = self

    class Tester(object):
        # Context manager handing out a fresh buffer; on clean exit it
        # asserts the buffer's JSON matches EXPECTED, then closes it.
        def __enter__(self):
            self.buffer = BytesIO()
            return self.buffer

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None:
                that.assertDictEqual(json.loads(self.buffer.getvalue()), EXPECTED)
            self.buffer.close()

    with Tester() as buffer:
        export_raw(headers, data, buffer, format=Format.JSON)

    with Tester() as buffer:
        # test lists
        export_raw(list(headers), list(data), buffer, format=Format.JSON)

    with Tester() as buffer:
        # test generators
        export_raw((h for h in headers), ((name, (r for r in rows)) for name, rows in data), buffer, format=Format.JSON)

    with Tester() as buffer:
        # test export_from_tables
        # NOTE: headers/data are rebound to dicts here, so this case must
        # stay last.
        headers = dict(headers)
        data = dict(data)
        tables = {}
        for key in set(headers.keys()) | set(data.keys()):
            tables[key] = itertools.chain([headers[key]], data[key])
        export_from_tables(list(tables.items()), buffer, format=Format.JSON)
def get(self, request, domain, app_id):
    # Export an app summary workbook: a single sheet named after the app
    # holding an app-level row, then one row per module and per form.
    language = request.GET.get('lang', 'en')

    headers = [(self.app.name, tuple(APP_SUMMARY_EXPORT_HEADER_NAMES))]
    data = [(self.app.name, [
        AppSummaryRow(
            app=self.app.name,
            comments=self.app.comment,
        )
    ])]

    for module in self.app.get_modules():
        try:
            case_list_filter = module.case_details.short.filter
        except AttributeError:
            # Not every module type carries case list details.
            case_list_filter = None

        data += [
            (self.app.name, [
                AppSummaryRow(
                    app=self.app.name,
                    module=_get_translated_module_name(self.app, module.unique_id, language),
                    display_filter=module.module_filter,
                    case_type=module.case_type,
                    case_list_filter=case_list_filter,
                    case_actions=module.case_details.short.filter if hasattr(module, 'case_details') else None,
                    filter=module.module_filter,
                    module_type='advanced' if isinstance(module, AdvancedModule) else 'standard',
                    comments=module.comment,
                    parent_module=(_get_translated_module_name(self.app, module.root_module_id, language)
                                   if module.root_module_id else '')
                )
            ])
        ]
        for form in module.get_forms():
            post_form_workflow = form.post_form_workflow
            if form.post_form_workflow == WORKFLOW_FORM:
                # Expand form-link workflows into a readable
                # "form: xpath [datums]" listing, one link per line.
                post_form_workflow = "form:\n{}".format(
                    "\n".join(
                        ["{form}: {xpath} [{datums}]".format(
                            form=_get_translated_form_name(self.app, link.form_id, language),
                            xpath=link.xpath,
                            datums=", ".join(
                                "{}: {}".format(
                                    datum.name, datum.xpath
                                ) for datum in link.datums)
                        ) for link in form.form_links]
                    )
                )
            data += [
                (self.app.name, [
                    AppSummaryRow(
                        app=self.app.name,
                        module=_get_translated_module_name(self.app, module.unique_id, language),
                        form=_get_translated_form_name(self.app, form.get_unique_id(), language),
                        display_filter=form.form_filter,
                        case_type=form.get_case_type(),
                        case_actions=self._get_form_actions(form),
                        filter=form.form_filter,
                        module_type='advanced' if isinstance(module, AdvancedModule) else 'standard',
                        comments=form.comment,
                        end_of_form_navigation=post_form_workflow,
                    )
                ])
            ]

    export_string = io.BytesIO()
    # NOTE(review): the trailing comma makes this statement a discarded
    # one-element tuple; harmless, but looks unintentional.
    export_raw(tuple(headers), data, export_string, Format.XLS_2007),
    return export_response(
        export_string,
        Format.XLS_2007,
        '{app_name} v.{app_version} - App Summary ({lang})'.format(
            app_name=self.app.name,
            app_version=self.app.version,
            lang=language
        ),
    )
def get_worksheet(self, title):
    """Export self.headers/self.data to an in-memory workbook and return one sheet."""
    exported = StringIO()
    export_raw(self.headers, self.data, exported, format=Format.XLS_2007)
    exported.seek(0)
    # WorkbookJSONReader's __init__ reads the stream immediately.
    return WorkbookJSONReader(exported).worksheets_by_title[title]
def download_item_lists(request, domain):
    # Export all fixture tables for a domain as an Excel workbook: a
    # "types" sheet describing each fixture type, plus one sheet of item
    # rows per type, padded out to the widest group/user membership.
    data_types = FixtureDataType.by_domain(domain)
    data_type_schemas = []
    max_fields = 0
    max_groups = 0
    max_users = 0
    mmax_groups = 0
    mmax_users = 0
    data_tables = []

    def _get_empty_list(length):
        # Padding helper: a list of `length` empty strings.
        return ["" for x in range(0, length)]

    # Fills sheets' schemas and data
    for data_type in data_types:
        type_schema = [str(_id_from_doc(data_type)), "N", data_type.name, data_type.tag, yesno(data_type.is_global)]
        fields = [field for field in data_type.fields]
        type_id = data_type.get_id
        data_table_of_type = []
        # First pass over the items: find the widest group/user membership
        # for this type so every row can be padded to the same width.
        for item_row in FixtureDataItem.by_data_type(domain, type_id):
            group_len = len(item_row.get_groups())
            max_groups = group_len if group_len>max_groups else max_groups
            user_len = len(item_row.get_users())
            max_users = user_len if user_len>max_users else max_users
        # Second pass: emit one padded row per fixture item.
        for item_row in FixtureDataItem.by_data_type(domain, type_id):
            groups = [group.name for group in item_row.get_groups()] + _get_empty_list(max_groups - len(item_row.get_groups()))
            users = [user.raw_username for user in item_row.get_users()] + _get_empty_list(max_users - len(item_row.get_users()))
            data_row = tuple([str(_id_from_doc(item_row)), "N"] + [item_row.fields.get(field, None) or "" for field in fields] + groups + users)
            data_table_of_type.append(data_row)
        type_schema.extend(fields)
        data_type_schemas.append(tuple(type_schema))
        if max_fields<len(type_schema):
            max_fields = len(type_schema)
        data_tables.append((data_type.tag,tuple(data_table_of_type)))
        # Track the global maxima across all types, then reset the
        # per-type counters for the next iteration.
        mmax_users = max_users if max_users>mmax_users else mmax_users
        mmax_groups = max_groups if max_groups>mmax_groups else mmax_groups
        max_users = 0
        max_groups = 0

    # Headers for the "types" sheet, then one header per item sheet padded
    # to the widest group/user counts seen across all types.
    type_headers = ["UID", DELETE_HEADER, "name", "tag", 'is_global?'] + ["field %d" % x for x in range(1, max_fields - 4)]
    type_headers = ("types", tuple(type_headers))
    table_headers = [type_headers]
    for type_schema in data_type_schemas:
        item_header = (type_schema[3], tuple(["UID", DELETE_HEADER] +
                                             ["field: " + x for x in type_schema[5:]] +
                                             ["group %d" % x for x in range(1, mmax_groups + 1)] +
                                             ["user %d" % x for x in range(1, mmax_users + 1)]))
        table_headers.append(item_header)
    table_headers = tuple(table_headers)
    type_rows = ("types", tuple(data_type_schemas))
    data_tables = tuple([type_rows]+data_tables)

    """
        Example of sheets preperation:

        headers:
         (("employee", ("id", "name", "gender")),
          ("building", ("id", "name", "address")))

        data:
         (("employee", (("1", "cory", "m"),
                        ("2", "christian", "m"),
                        ("3", "amelia", "f"))),
          ("building", (("1", "dimagi", "585 mass ave."),
                        ("2", "old dimagi", "529 main st."))))
    """
    # Write everything to a temp file and stream it back as a download.
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as temp:
        export_raw((table_headers), (data_tables), temp)
    format = Format.XLS_2007
    return export_response(open(path), format, "%s_fixtures" % domain)