def get_tour_data(self, request, step):
    """Extend the base tour payload with report-builder specific URLs/flags."""
    from corehq.apps.dashboard.views import DomainDashboardView
    from corehq.apps.reports.views import MySavedReportsView
    from corehq.apps.userreports.views import ReportBuilderPaywall

    tour_flag = {'tour': True}
    dashboard_url = reverse(DomainDashboardView.urlname, args=[request.domain])
    reports_home_url = reverse(
        MySavedReportsView.urlname, args=[request.domain], params=tour_flag)
    paywall_url = reverse(
        ReportBuilderPaywall.urlname, args=[request.domain], params=tour_flag)

    data = super(ReportBuilderTour, self).get_tour_data()
    data.update({
        'step': step,
        'has_rb_access': has_report_builder_access(request),
        'dashboard_url': dashboard_url,
        'reports_home_url': reports_home_url,
        'report_builder_paywall_url': paywall_url,
    })
    return data
def paywall_home(domain):
    """
    Return the url for the page in the report builder paywall that users in
    the given domain should be directed to upon clicking "+ Create new report"
    """
    project = Domain.get_by_name(domain, strict=True)
    if project.requested_report_builder_subscription:
        view = ReportBuilderPaywallActivatingSubscription
    else:
        view = ReportBuilderPaywallPricing
    return reverse(view.urlname, args=[domain])
def get(self, request, *args, **kwargs):
    """Render the soil polling page for an organization-structure download."""
    download_id = kwargs['download_id']
    context = super(DownloadLocationStatusView, self).main_context
    context.update({
        'domain': self.domain,
        'download_id': download_id,
        'poll_url': reverse('org_download_job_poll',
                            args=[self.domain, download_id]),
        'title': _("Download Organization Structure Status"),
        'progress_text': _("Preparing organization structure download."),
        'error_text': _("There was an unexpected error! Please try again or report an issue."),
        'next_url': reverse(LocationsListView.urlname, args=[self.domain]),
        'next_url_text': _("Go back to organization structure"),
    })
    return render(request, 'hqwebapp/soil_status_full.html', context)
def resume_building_data_source(request, domain, config_id):
    """Resume an interrupted data source build when resume info is available,
    flashing an explanatory message otherwise; always bounce to the edit page."""
    config, is_static = get_datasource_config_or_404(config_id, domain)
    already_finished = not is_static and config.meta.build.finished
    if already_finished:
        messages.warning(
            request,
            _(u'Table "{}" has already finished building. Rebuild table to start over.').format(
                config.display_name
            )
        )
    elif not DataSourceResumeHelper(config).has_resume_info():
        messages.warning(
            request,
            _(u'Table "{}" did not finish building but resume information is not available. '
              u'Unfortunately, this means you need to rebuild the table.').format(
                config.display_name
            )
        )
    else:
        messages.success(
            request,
            _(u'Resuming rebuilding table "{}".').format(config.display_name)
        )
        resume_building_indicators.delay(config_id, request.user.username)
    return HttpResponseRedirect(reverse(
        EditDataSourceView.urlname, args=[domain, config._id]
    ))
def post(self, request, *args, **kwargs):
    """Handle the first report-builder wizard step: record analytics and
    forward the chosen data source to the type-specific configure view."""
    if not self.form.is_valid():
        return self.get(request, *args, **kwargs)

    app_source = self.form.get_selected_source()
    # Each report type has its own configuration view.
    url_name = {
        'list': 'configure_list_report',
        'chart': 'configure_chart_report',
        'table': 'configure_table_report',
        'worker': 'configure_worker_report',
        'map': 'configure_map_report',
    }[self.report_type]
    get_params = {
        'report_name': self.form.cleaned_data['report_name'],
        'chart_type': self.form.cleaned_data['chart_type'],
        'application': app_source.application,
        'source_type': app_source.source_type,
        'source': app_source.source,
    }
    track_workflow(
        request.user.email,
        "Successfully submitted the first part of the Report Builder "
        "wizard where you give your report a name and choose a data source"
    )
    add_event(request, [
        "Report Builder",
        "Successful Click on Next (Data Source)",
        app_source.source_type,
    ])
    return HttpResponseRedirect(
        reverse(url_name, args=[self.domain], params=get_params)
    )
def post(self, *args, **kwargs):
    """Create or update a report configuration from the submitted form and
    redirect to the resulting report; re-render the form on any failure."""
    if not self.report_form.is_valid():
        self._track_invalid_form_events()
        return self.get(*args, **kwargs)

    existing_sum_avg_cols = []
    if self.report_form.existing_report:
        try:
            # Capture sum/avg columns before the update so analytics can
            # compare them to the new configuration.
            existing_sum_avg_cols = self._get_sum_avg_columns(
                self.report_form.existing_report.columns)
            report_configuration = self.report_form.update_report()
        except ValidationError as e:
            messages.error(self.request, e.message)
            return self.get(*args, **kwargs)
    else:
        self._confirm_report_limit()
        report_configuration = self.report_form.create_report()
        self._track_new_report_events()

    self._track_valid_form_events(existing_sum_avg_cols, report_configuration)
    return HttpResponseRedirect(
        reverse(ConfigurableReport.slug,
                args=[self.domain, report_configuration._id])
    )
def delete_report(request, domain, report_id):
    """Delete a configurable report (feature-flag gated).  When the report is
    the last one on its data source, deactivate the data source as well."""
    allowed = (
        toggle_enabled(request, toggles.USER_CONFIGURABLE_REPORTS)
        or toggle_enabled(request, toggles.REPORT_BUILDER)
        or toggle_enabled(request, toggles.REPORT_BUILDER_BETA_GROUP)
        or has_report_builder_add_on_privilege(request)
    )
    if not allowed:
        raise Http404()

    config = get_document_or_404(ReportConfiguration, domain, report_id)

    # Delete the data source too if it's not being used by any other reports.
    try:
        data_source, __ = get_datasource_config(config.config_id, domain)
    except DataSourceConfigurationNotFoundError:
        # It's possible the data source has already been deleted, but that's fine with us.
        pass
    else:
        if data_source.get_report_count() <= 1:
            # No other reports reference this data source.
            data_source.deactivate()

    config.delete()
    purged = purge_report_from_mobile_ucr(config)

    messages.success(request, _(u'Report "{}" deleted!').format(config.title))
    if purged:
        messages.warning(
            request,
            _(u"This report was used in one or more applications. "
              "It has been removed from there too.")
        )
    redirect_to = request.GET.get("redirect", None)
    if not redirect_to:
        redirect_to = reverse('configurable_reports_home', args=[domain])
    return HttpResponseRedirect(redirect_to)
def post(self, request, *args, **kwargs):
    """Validate an xlsx upload, stash it in soil, and kick off the async
    location import, then redirect to the status polling page."""
    xlsx_mime = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    if upload.content_type != xlsx_mime:
        messages.error(request, _("Invalid file-format. Please upload a valid xlsx file."))
        return self.get(request, *args, **kwargs)

    domain = args[0]
    one_hour = 1 * 60 * 60
    # stash this in soil to make it easier to pass to celery
    file_ref = expose_cached_download(
        upload.read(),
        expiry=one_hour,
        file_extension=file_extention_from_filename(upload.name),
    )
    task = import_locations_async.delay(domain, file_ref.download_id)
    # put the file_ref.download_id in cache to lookup from elsewhere
    cache.set(import_locations_task_key(domain), file_ref.download_id, one_hour)
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(LocationImportStatusView.urlname,
                args=[domain, file_ref.download_id])
    )
def post(self, request, *args, **kwargs):
    """Validate an xlsx upload, cache it (with locking), and start the async
    location import for the uploading user."""
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)
    xlsx_mime = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    if upload.content_type != xlsx_mime:
        messages.error(request, _("Invalid file-format. Please upload a valid xlsx file."))
        return self.get(request, *args, **kwargs)

    domain = args[0]
    ref = self._cache_file(request, domain, upload)
    if not isinstance(ref, LocationImportView.Ref):
        # ref is HTTP response: lock could not be acquired
        return ref
    file_ref = ref.value
    task = import_locations_async.delay(
        domain, file_ref.download_id, request.couch_user.user_id)
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(LocationImportStatusView.urlname,
                args=[domain, file_ref.download_id])
    )
def _handle_exception(self, response, exception):
    """Populate `response` with a user-facing error message for the known
    failure modes; re-raise the active exception otherwise."""
    manually_edited = (
        self.existing_report
        and self.existing_report.report_meta.edited_manually
    )
    if manually_edited:
        error_message_base = _(
            'It looks like this report was edited by hand and is no longer editable in report builder.'
        )
        if toggle_enabled(self.request, toggles.USER_CONFIGURABLE_REPORTS):
            advanced_url = reverse(
                EditConfigReportView.urlname,
                args=[self.domain, self.existing_report._id])
            error_message = '{} {}'.format(error_message_base, _(
                'You can edit the report manually using the <a href="{}">advanced UI</a>.'
            ).format(advanced_url))
        else:
            error_message = '{} {}'.format(
                error_message_base,
                _('You can delete and recreate this report using the button below, or '
                  'report an issue if you believe you are seeing this page in error.')
            )
        response['error_message'] = error_message
        return response
    if isinstance(exception, DataSourceConfigurationNotFoundError):
        response['details'] = None
        response['error_message'] = '{} {}'.format(
            str(exception), DATA_SOURCE_NOT_FOUND_ERROR_MESSAGE)
        return response
    # Unknown failure: re-raise the exception currently being handled
    # (this method is expected to be called from inside an except block).
    raise
def post(self, request, *args, **kwargs):
    """Stash the uploaded file in soil and launch the async location import."""
    upload = request.FILES.get('bulk_upload_file')
    if not upload:
        messages.error(request, _('no file uploaded'))
        return self.get(request, *args, **kwargs)
    if not args:
        messages.error(request, _('no domain specified'))
        return self.get(request, *args, **kwargs)

    domain = args[0]
    # stash this in soil to make it easier to pass to celery
    one_hour = 1 * 60 * 60
    file_ref = expose_cached_download(upload.read(), expiry=one_hour)
    task = import_locations_async.delay(domain, file_ref.download_id)
    file_ref.set_task(task)
    return HttpResponseRedirect(
        reverse(LocationImportStatusView.urlname,
                args=[domain, file_ref.download_id])
    )
def filter_context(self):
    """Build the template context for this EWS location filter.

    The pre-selected location is resolved in priority order: the
    ``location_id`` GET parameter, the user's domain-membership location,
    then the domain's 'country'-type location as a last resort.
    """
    api_root = reverse('api_dispatch_list', params={'show_administrative': False},
                       kwargs={'domain': self.domain,
                               'resource_name': 'ews_location',
                               'api_name': 'v0.3'})
    user = self.request.couch_user
    loc_id = self.request.GET.get('location_id')
    if not loc_id:
        domain_membership = user.get_domain_membership(self.domain)
        if not domain_membership or not domain_membership.location_id:
            # NOTE(review): .first() returns None when the domain has no
            # 'country' location type, which would raise AttributeError
            # here -- confirm a country location always exists for EWS.
            loc_id = SQLLocation.objects.filter(
                domain=self.domain,
                location_type__name='country'
            ).first().location_id
        else:
            loc_id = domain_membership.location_id
    location = get_object_or_404(SQLLocation, location_id=loc_id)
    if not location.location_type.administrative:
        # Fall back to the parent when the resolved location is not
        # administrative (assumes such a location has a parent -- TODO confirm).
        loc_id = location.parent.location_id
    hier = location_hierarchy_config(self.domain)
    hierarchy = []
    for h in hier:
        # Hide hierarchy levels that are reporting (non-administrative) types.
        if h[0] not in self.reporting_types():
            hierarchy.append(h)
    return {
        'api_root': api_root,
        'control_name': self.label,
        'control_slug': self.slug,
        'loc_id': loc_id,
        'locations': load_locs_json(self.domain, loc_id,
                                    only_administrative=self.only_administrative),
        'hierarchy': hierarchy
    }
def form_valid(self):
    """Flash a success message and redirect to the saved location's edit page."""
    messages.success(self.request, _('Location saved!'))
    edit_url = reverse(
        EditLocationView.urlname,
        args=[self.domain, self.location_form.location.location_id])
    return HttpResponseRedirect(edit_url)
def post(self, request, *args, **kwargs):
    """Save edits to the report configuration, or re-render on invalid input."""
    if not self.edit_form.is_valid():
        return self.get(request, *args, **kwargs)
    self.edit_form.save(commit=True)
    messages.success(request, _(u'Report "{}" saved!').format(self.config.title))
    return HttpResponseRedirect(reverse(
        'edit_configurable_report', args=[self.domain, self.config._id]))
def main_context(self):
    """Extend the base context with the location hierarchy and API root URL."""
    api_root = reverse('api_dispatch_list', kwargs={
        'domain': self.domain,
        'resource_name': 'location',
        'api_name': 'v0.3',
    })
    context = super(BaseLocationView, self).main_context
    context.update({
        'hierarchy': location_hierarchy_config(self.domain),
        'api_root': api_root,
    })
    return context
def export_data_source(request, domain, config_id):
    """Export the rows of a data source in the requested format.

    Query-string parameters drive filtering and output format; invalid
    parameters or formats return HTTP 400.  XLS exports that exceed the
    sheet's row limit are redirected back with the format param stripped so
    the default format is used instead.
    """
    # FIX: the unused second value was previously bound to `_`, shadowing
    # the gettext alias used throughout this module.
    config, _is_static = get_datasource_config_or_404(config_id, domain)
    adapter = IndicatorSqlAdapter(config)
    q = adapter.get_query_object()
    table = adapter.get_table()

    try:
        params = process_url_params(request.GET, table.columns)
        allowed_formats = [
            Format.CSV,
            Format.HTML,
            Format.XLS,
            Format.XLS_2007,
        ]
        if params.format not in allowed_formats:
            msg = ugettext_lazy('format must be one of the following: {}').format(', '.join(allowed_formats))
            return HttpResponse(msg, status=400)
    except UserQueryError as e:
        return HttpResponse(e.message, status=400)

    q = q.filter_by(**params.keyword_filters)
    for sql_filter in params.sql_filters:
        q = q.filter(sql_filter)

    # xls format has limit of 65536 rows
    # First row is taken up by headers
    if params.format == Format.XLS and q.count() >= 65535:
        keyword_params = dict(**request.GET)
        # use default format
        if 'format' in keyword_params:
            del keyword_params['format']
        return HttpResponseRedirect(
            '%s?%s' % (
                reverse('export_configurable_data_source', args=[domain, config._id]),
                urlencode(keyword_params)
            )
        )

    # build export
    def get_table(q):
        # Header row first, then the raw result rows.
        yield table.columns.keys()
        for row in q:
            yield row

    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'wb') as tmpfile:
        try:
            tables = [[config.table_id, get_table(q)]]
            export_from_tables(tables, tmpfile, params.format)
        except exc.DataError:
            msg = ugettext_lazy(
                "There was a problem executing your query, "
                "please make sure your parameters are valid."
            )
            return HttpResponse(msg, status=400)
        return export_response(Temp(path), params.format, config.display_name)
def location_export(request, domain):
    """Stream all locations for `domain` as an xlsx attachment."""
    if not LocationType.objects.filter(domain=domain).exists():
        messages.error(request, _("You need to define location types before "
                                  "you can do a bulk import or export."))
        return HttpResponseRedirect(reverse(LocationsListView.urlname, args=[domain]))
    with_consumption = request.GET.get('include_consumption') == 'true'
    response = HttpResponse(content_type=Format.from_format('xlsx').mimetype)
    response['Content-Disposition'] = 'attachment; filename="locations.xlsx"'
    dump_locations(response, domain, with_consumption)
    return response
def post(self, request, *args, **kwargs):
    """Save the data source; a newly created source redirects to its edit page,
    everything else re-renders the current page."""
    if self.edit_form.is_valid():
        config = self.edit_form.save(commit=True)
        messages.success(
            request,
            _(u'Data source "{}" saved!').format(config.display_name))
        created_new = self.config_id is None
        if created_new:
            return HttpResponseRedirect(reverse(
                EditDataSourceView.urlname, args=[self.domain, config._id]))
    return self.get(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
    """Render the soil polling page for a running location import."""
    download_id = kwargs['download_id']
    context = super(LocationImportStatusView, self).main_context
    context.update({
        'domain': self.domain,
        'download_id': download_id,
        'poll_url': reverse('location_importer_job_poll',
                            args=[self.domain, download_id]),
        'title': _("Location Import Status"),
        'progress_text': _("Importing your data. This may take some time..."),
        'error_text': _("Problem importing data! Please try again or report an issue."),
    })
    return render(request, self.template_name, context)
def post(self, request, *args, **kwargs):
    """Create/update this domain's location types from the posted JSON.

    Saves the hierarchy top-down (parents before children), then attaches
    sync boundaries in a second pass because they reference other types.
    """
    payload = json.loads(request.POST.get('json'))
    sql_loc_types = {}

    def _is_fake_pk(pk):
        # Rows created client-side but never saved carry "fake-pk-" ids.
        return isinstance(pk, basestring) and pk.startswith("fake-pk-")

    def mk_loctype(name, parent_type, administrative,
                   shares_cases, view_descendants, pk, code,
                   expand_from, expand_from_root, expand_to):
        """Create or update one LocationType; expand_* args are applied later."""
        parent = sql_loc_types[parent_type] if parent_type else None

        loc_type = None
        if not _is_fake_pk(pk):
            try:
                loc_type = LocationType.objects.get(domain=self.domain, pk=pk)
            except LocationType.DoesNotExist:
                pass
        if loc_type is None:
            loc_type = LocationType(domain=self.domain)
        loc_type.name = name
        loc_type.administrative = administrative
        loc_type.parent_type = parent
        loc_type.shares_cases = shares_cases
        loc_type.view_descendants = view_descendants
        loc_type.code = unicode_slug(code)
        sql_loc_types[pk] = loc_type
        loc_type.save()

    loc_types = payload['loc_types']
    pks = []
    for loc_type in loc_types:
        for prop in ['name', 'parent_type', 'administrative',
                     'shares_cases', 'view_descendants', 'pk']:
            # FIX: was a bare `assert`, which is stripped under `python -O`.
            # Raise explicitly so malformed payloads are always rejected
            # (matches the later revision of this view).
            if prop not in loc_type:
                raise LocationConsistencyError("Missing an organization level property!")
        pk = loc_type['pk']
        if not _is_fake_pk(pk):
            pks.append(loc_type['pk'])

    hierarchy = self.get_hierarchy(loc_types)

    if not self.remove_old_location_types(pks):
        return self.get(request, *args, **kwargs)

    for loc_type in hierarchy:
        # make all locations in order
        mk_loctype(**loc_type)

    for loc_type in hierarchy:
        # apply sync boundaries (expand_from and expand_to) after the
        # locations are all created since there are dependencies between them
        self._attach_sync_boundaries_to_location_type(loc_type, sql_loc_types)

    return HttpResponseRedirect(reverse(self.urlname, args=[self.domain]))
def parent_pages(self):
    """Breadcrumbs: the locations list, then every ancestor of this location."""
    selected = self.location._id or self.location.parent_id
    list_params = {"selected": selected} if selected else None
    crumbs = [{
        'title': LocationsListView.page_title,
        'url': reverse(
            LocationsListView.urlname,
            args=[self.domain],
            params=list_params,
        )
    }]
    if self.location.parent:
        sql_parent = self.location.parent.sql_location
        crumbs.extend({
            'title': ancestor.name,
            'url': reverse(
                EditLocationView.urlname,
                args=[self.domain, ancestor.location_id],
            )
        } for ancestor in sql_parent.get_ancestors(include_self=True))
    return crumbs
def decorated(request, domain, *args, **kwargs):
    """Wrap a report view, turning database ProgrammingErrors into a flash
    message plus a redirect to the reports home page.

    In DEBUG the error is re-raised so developers see the traceback.
    """
    try:
        return fn(request, domain, *args, **kwargs)
    except ProgrammingError as e:
        if settings.DEBUG:
            raise
        messages.error(
            request,
            # FIX: format AFTER translating.  Previously `.format(e)` ran
            # inside `_()`, so the msgid contained the interpolated error
            # text and could never match a translation catalog entry.
            _('There was a problem processing your request. '
              'If you have recently modified your report data source please try again in a few minutes.'
              '<br><br>Technical details:<br>{}').format(e),
            extra_tags='html',
        )
        return HttpResponseRedirect(reverse('configurable_reports_home',
                                            args=[domain]))
def unassign_users(request, domain):
    """
    Unassign all users from their locations. This is for downgraded domains.
    """
    for user in get_users_assigned_to_locations(domain):
        # Web users are scoped per-domain; mobile workers are not.
        if user.is_web_user():
            user.unset_location(domain)
        elif user.is_commcare_user():
            user.unset_location()
    messages.success(request, _("All users have been unassigned from their locations"))
    fallback = reverse('users_default', args=[domain])
    return HttpResponseRedirect(request.POST.get('redirect', fallback))
def location_export(request, domain):
    """Kick off an async export of the domain's locations and redirect to the
    download status page.  Header-only exports skip the permission check."""
    headers_only = request.GET.get('download_type', 'full') == 'empty'
    if not request.can_access_all_locations and not headers_only:
        return no_permissions(request)
    if not LocationType.objects.filter(domain=domain).exists():
        messages.error(request, _("You need to define organization levels before "
                                  "you can do a bulk import or export."))
        return HttpResponseRedirect(reverse(LocationsListView.urlname, args=[domain]))
    include_consumption = request.GET.get('include_consumption') == 'true'
    download = DownloadBase()
    task = download_locations_async.delay(
        domain, download.download_id, include_consumption, headers_only)
    download.set_task(task)
    return redirect(DownloadLocationStatusView.urlname, domain, download.download_id)
def new_fn(request, domain, *args, **kwargs):
    """Guard a locations view while a bulk import is running: either redirect
    to the import status page or warn the user that edits are blocked."""
    download_id = pending_location_import_download_id(domain)
    if download_id:
        status_url = reverse('location_import_status', args=[domain, download_id])
        if redirect:
            # redirect to import status page
            return HttpResponseRedirect(status_url)
        else:
            # FIX: the anchor previously read href='{}''> (stray trailing
            # quote), producing invalid HTML in the rendered warning.
            messages.warning(request, mark_safe(
                _("Organizations can't be edited until "
                  "<a href='{}'>current bulk upload</a> "
                  "has finished.").format(status_url)
            ))
            return view_fn(request, domain, *args, **kwargs)
    else:
        return view_fn(request, domain, *args, **kwargs)
def post(self, request, *args, **kwargs):
    """Create a ReportConfiguration from the raw JSON spec supplied by the user."""
    try:
        json_spec = json.loads(self.spec)
        # Drop any _id so a fresh document is always created.
        json_spec.pop('_id', None)
        json_spec['domain'] = self.domain
        report = ReportConfiguration.wrap(json_spec)
        report.validate()
        report.save()
        messages.success(request, _('Report created!'))
        return HttpResponseRedirect(reverse(
            EditConfigReportView.urlname, args=[self.domain, report._id]
        ))
    except (ValueError, BadSpecError) as e:
        messages.error(request, _('Bad report source: {}').format(e))
        return self.get(request, *args, **kwargs)
def dispatch(self, request, *args, **kwargs):
    """Route a builder-created report to the configure view for its type;
    404 for reports that weren't made with the report builder."""
    report_id = kwargs['report_id']
    report = get_document_or_404(ReportConfiguration, request.domain, report_id)
    if not report.report_meta.created_by_builder:
        raise Http404("Report was not created by the report builder")
    view_by_type = {
        'chart': ConfigureChartReport,
        'list': ConfigureListReport,
        'worker': ConfigureWorkerReport,
        'table': ConfigureTableReport,
        'map': ConfigureMapReport,
    }
    view_class = view_by_type[report.report_meta.builder_report_type]
    try:
        return view_class.as_view(existing_report=report)(request, *args, **kwargs)
    except BadBuilderConfigError as e:
        messages.error(request, e.message)
        return HttpResponseRedirect(
            reverse(ConfigurableReport.slug, args=[request.domain, report_id]))
def rebuild_data_source(request, domain, config_id):
    """Reactivate a data source if needed, queue a full rebuild of its
    indicator table, and return to its edit page."""
    config, is_static = get_datasource_config_or_404(config_id, domain)
    if config.is_deactivated:
        config.is_deactivated = False
        config.save()
    messages.success(
        request,
        _('Table "{}" is now being rebuilt. Data should start showing up soon').format(
            config.display_name))
    rebuild_indicators.delay(config_id, request.user.username)
    return HttpResponseRedirect(reverse(
        EditDataSourceView.urlname, args=[domain, config._id]))
def test_remote_linked_app(self, fake_case_search_config_getter):
    """Pulling case-search config over the remote-link API should mirror the
    master domain's settings onto the linked domain."""
    url = reverse('linked_domain:case_search_config', args=[self.domain])
    request_headers = dict(self.auth_headers)
    request_headers[REMOTE_REQUESTER_HEADER] = self.domain_link.linked_domain
    response = self.client.get(url, **request_headers)
    # Feed the live API response through the mocked remote getter.
    fake_case_search_config_getter.return_value = json.loads(response.content)

    update_case_search_config(self.domain_link)

    synced_config = (CaseSearchConfig.objects
                     .prefetch_related('fuzzy_properties')
                     .get(domain=self.domain_link.linked_domain))
    self.assertTrue(synced_config.enabled)
    self.assertEqual(
        set(synced_config.fuzzy_properties.all()[0].properties),
        set(self.search_config.fuzzy_properties.all()[0].properties)
    )
def test_remote_auth(self):
    """Exercise each auth failure mode of the remote toggles endpoint."""
    url = reverse('linked_domain:toggles', args=[self.master_domain])

    # No credentials at all -> 401
    self.assertEqual(self.client.get(url).status_code, 401)
    # Authenticated but missing the requester header -> 400
    self.assertEqual(self.client.get(url, **self.auth_headers).status_code, 400)

    headers = dict(self.auth_headers)
    headers[REMOTE_REQUESTER_HEADER] = 'wrong'
    # Wrong requester -> 403
    self.assertEqual(self.client.get(url, **headers).status_code, 403)

    headers[REMOTE_REQUESTER_HEADER] = self.linked_domain_requester
    resp = self.client.get(url, **headers)
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(json.loads(resp.content), {'toggles': [], 'previews': []})
def dispatch(self, *args, **kwargs):
    """Skip this page entirely when no users are assigned to locations."""
    if not users_have_locations(self.domain):
        # irrelevant, redirect
        return HttpResponseRedirect(reverse('users_default', args=[self.domain]))
    return super(DowngradeLocationsView, self).dispatch(*args, **kwargs)
def page_url(self):
    """URL of this page for the current domain and data source config."""
    return reverse(self.urlname, args=(self.domain, self.config_id))
def section_url(self):
    """URL of the configurable-reports home page for this domain."""
    return reverse(UserConfigReportsHomeView.urlname, args=[self.domain])
def page_url(self):
    """URL of this page for the current domain and location."""
    return reverse(self.urlname, args=(self.domain, self.location_id))
def post(self, request, *args, **kwargs):
    """Create/update this domain's location types from the posted JSON.

    Validates required properties and name/code uniqueness, removes types
    deleted in the UI, then saves the hierarchy top-down before attaching
    sync boundaries (which reference other types) in a second pass.
    """
    payload = json.loads(request.POST.get('json'))
    sql_loc_types = {}

    def _is_fake_pk(pk):
        # Rows created client-side but never saved carry "fake-pk-" ids.
        return isinstance(pk, basestring) and pk.startswith("fake-pk-")

    def mk_loctype(name, parent_type, administrative,
                   shares_cases, view_descendants, pk, code, **kwargs):
        # Create or update one LocationType and record it by payload pk so
        # children saved later can look up their parent.
        parent = sql_loc_types[parent_type] if parent_type else None

        loc_type = None
        if not _is_fake_pk(pk):
            try:
                loc_type = LocationType.objects.get(domain=self.domain, pk=pk)
            except LocationType.DoesNotExist:
                pass
        if loc_type is None:
            loc_type = LocationType(domain=self.domain)
        loc_type.name = name
        loc_type.administrative = administrative
        loc_type.parent_type = parent
        loc_type.shares_cases = shares_cases
        loc_type.view_descendants = view_descendants
        loc_type.code = unicode_slug(code)
        sql_loc_types[pk] = loc_type
        loc_type.save()

    loc_types = payload['loc_types']
    pks = []
    for loc_type in loc_types:
        for prop in ['name', 'parent_type', 'administrative',
                     'shares_cases', 'view_descendants', 'pk']:
            if prop not in loc_type:
                raise LocationConsistencyError("Missing an organization level property!")
        pk = loc_type['pk']
        if not _is_fake_pk(pk):
            pks.append(loc_type['pk'])

    # Reject duplicate names or (non-empty) codes across the payload.
    names = [lt['name'] for lt in loc_types]
    names_are_unique = len(names) == len(set(names))
    codes = [lt['code'] for lt in loc_types if lt['code']]
    codes_are_unique = len(codes) == len(set(codes))
    if not names_are_unique or not codes_are_unique:
        raise LocationConsistencyError("'name' and 'code' are supposed to be unique")

    hierarchy = self.get_hierarchy(loc_types)

    if not self.remove_old_location_types(pks):
        return self.get(request, *args, **kwargs)

    for loc_type in hierarchy:
        # make all locations in order
        mk_loctype(**loc_type)

    for loc_type in hierarchy:
        # apply sync boundaries (expand_from, expand_to and include_without_expanding) after the
        # locations are all created since there are dependencies between them
        self._attach_sync_boundaries_to_location_type(loc_type, sql_loc_types)

    return HttpResponseRedirect(reverse(self.urlname,
                                        args=[self.domain]))
def get_tour_data(self):
    """Base payload shared by all guided tours."""
    end_url = reverse(EndTourView.urlname, args=(self.slug, ))
    return {
        'slug': self.slug,
        'template': self.template,
        'endUrl': end_url,
    }
def page_context(self):
    """Template context for the authenticate-as page."""
    form = AuthenticateAsForm(initial=self.request.POST)
    return {
        'hide_filters': True,
        'form': form,
        'root_page_url': reverse('authenticate_as'),
    }
def setUpClass(cls):
    """Create a throwaway domain plus one mobile worker for restore tests."""
    username, password = 'mtest', 'abc'
    cls.domain = uuid.uuid4().hex
    cls.project = create_domain(cls.domain)
    cls.user = CommCareUser.create(cls.domain, username, password)
    cls.auth_header = _get_auth_header(username, password)
    cls.restore_uri = reverse('ota_restore', args=[cls.domain])
def delete_data_source(request, domain, config_id):
    """Delete a data source, then return to the reports home page."""
    delete_data_source_shared(domain, config_id, request)
    home = reverse('configurable_reports_home', args=[domain])
    return HttpResponseRedirect(home)
def page_url(self):
    """URL of this page, reversed from the view's captured args/kwargs."""
    url = reverse(self.urlname, args=self.args, kwargs=self.kwargs)
    return url
def get_custom_url(cls, domain):
    """URL of the BETS AYUSH referral repeater view for `domain`."""
    from custom.enikshay.integrations.bets.views import BETSAYUSHReferralRepeaterView
    return reverse(BETSAYUSHReferralRepeaterView.urlname, args=(domain, ))
def tiles(self):
    """The report-builder tile grid: one tile per report type, each linking to
    the source-selection step and carrying its analytics labels."""
    clicked_tile = "Clicked on Report Builder Tile"

    def _tile(title, slug, usage_label, workflow_labels, icon, report_type, help_text):
        # All tiles share the same context processor and target view.
        return TileConfiguration(
            title=title,
            slug=slug,
            analytics_usage_label=usage_label,
            analytics_workflow_labels=workflow_labels,
            icon=icon,
            context_processor_class=IconContext,
            url=reverse('report_builder_select_source', args=[self.domain, report_type]),
            help_text=help_text,
        )

    return [
        _tile(
            _('Chart'), 'chart', "Chart",
            [clicked_tile, "Clicked Chart Tile"],
            'fcc fcc-piegraph-report', 'chart',
            _('A bar graph or a pie chart to show data from your cases or forms.'
              ' You choose the property to graph.'),
        ),
        _tile(
            _('Form or Case List'), 'form-or-case-list', "List",
            [clicked_tile, "Clicked Form or Case List Tile"],
            'fcc fcc-form-report', 'list',
            _('A list of cases or form submissions.'
              ' You choose which properties will be columns.'),
        ),
        _tile(
            _('Worker Report'), 'worker-report', "Worker",
            [clicked_tile, "Clicked Worker Report Tile"],
            'fcc fcc-user-report', 'worker',
            _('A table of your mobile workers.'
              ' You choose which properties will be the columns.'),
        ),
        _tile(
            _('Data Table'), 'data-table', "Table",
            [clicked_tile, "Clicked Data Table Tile"],
            'fcc fcc-datatable-report', 'table',
            _('A table of aggregated data from form submissions or case properties.'
              ' You choose the columns and rows.'),
        ),
        _tile(
            _('Map'), 'map', "Map",
            [clicked_tile],
            'fcc fcc-globe', 'map',
            _('A map to show data from your cases or forms.'
              ' You choose the property to map.'),
        ),
    ]
def get_custom_url(cls, domain):
    """URL of the BETS successful-treatment repeater view for `domain`."""
    from custom.enikshay.integrations.bets.views import BETSSuccessfulTreatmentRepeaterView
    return reverse(BETSSuccessfulTreatmentRepeaterView.urlname, args=(domain, ))
def get_custom_url(cls, domain):
    """URL of the lab BETS voucher repeater view for `domain`."""
    from custom.enikshay.integrations.bets.views import LabBETSVoucherRepeaterView
    return reverse(LabBETSVoucherRepeaterView.urlname, args=(domain, ))
def default(request):
    """Redirect to the default admin report (the domains report)."""
    url = reverse('admin_report_dispatcher', args=['domains'])
    return HttpResponseRedirect(url)
def section_url(self):
    """URL of the domain settings landing page."""
    return reverse('settings_default', args=[self.domain])
def raw_couch(request):
    """Forward all query parameters to the raw_doc view."""
    query = dict(request.GET.iteritems())
    return HttpResponseRedirect(reverse("raw_doc", params=query))
def section_url(self):
    """URL of the locations list for this domain."""
    return reverse(LocationsListView.urlname, args=(self.domain, ))
def page_url(self):
    """URL of this page (the view takes no URL arguments)."""
    return reverse(self.urlname)
def section_url(self):
    """URL of the default admin report."""
    return reverse('default_admin_report')
def default(request, domain):
    """Send the user to the most privileged locations page they can access."""
    couch_user = request.couch_user
    if couch_user.can_edit_locations():
        return HttpResponseRedirect(reverse(LocationsListView.urlname, args=[domain]))
    if user_can_edit_location_types(couch_user, request.project):
        return HttpResponseRedirect(reverse(LocationTypesView.urlname, args=[domain]))
    return no_permissions(request)
def redirect_url(self):
    """URL of the web-user lookup page, pre-filled with the searched username."""
    base = reverse('web_user_lookup')
    # NOTE(review): username is interpolated without URL-encoding -- confirm
    # callers only pass query-safe usernames.
    return '{}?q={}'.format(base, self.username)
def post(self, request, *args, **kwargs):
    """Create/update this domain's location types from the posted JSON.

    Validates required properties, name/code uniqueness, and that no
    name/code is being reassigned between existing types in one request.
    Removes deleted types, saves the hierarchy top-down, then attaches sync
    boundaries (which reference other types) in a second pass.
    """
    payload = json.loads(request.POST.get('json'))
    sql_loc_types = {}

    def _is_fake_pk(pk):
        # Rows created client-side but never saved carry "fake-pk-" ids.
        return isinstance(pk, str) and pk.startswith("fake-pk-")

    def mk_loctype(name, parent_type, administrative,
                   shares_cases, view_descendants, pk, code, **kwargs):
        # Create or update one LocationType and record it by payload pk so
        # children saved later can look up their parent.
        parent = sql_loc_types[parent_type] if parent_type else None

        loc_type = None
        if not _is_fake_pk(pk):
            try:
                loc_type = LocationType.objects.get(domain=self.domain, pk=pk)
            except LocationType.DoesNotExist:
                pass
        if loc_type is None:
            loc_type = LocationType(domain=self.domain)
        loc_type.name = name
        loc_type.administrative = administrative
        loc_type.parent_type = parent
        loc_type.shares_cases = shares_cases
        loc_type.view_descendants = view_descendants
        loc_type.code = unicode_slug(code)
        sql_loc_types[pk] = loc_type
        loc_type.save()

    def unique_name_and_code():
        current_location_types = LocationType.objects.by_domain(request.domain)
        for location_type in current_location_types:
            if location_type.pk in payload_loc_type_name_by_pk:
                # to check if the name/code was swapped with another location by confirming if
                # either name/code has changed but the current name is still present in the names/codes passed
                if (
                    (location_type.name != payload_loc_type_name_by_pk.get(location_type.pk)
                     and location_type.name in names)
                    or
                    (location_type.code != payload_loc_type_code_by_pk.get(location_type.pk)
                     and location_type.code in codes)
                ):
                    return False
        return True

    loc_types = payload['loc_types']
    pks = []
    payload_loc_type_name_by_pk = {}
    payload_loc_type_code_by_pk = {}
    for loc_type in loc_types:
        for prop in ['name', 'parent_type', 'administrative',
                     'shares_cases', 'view_descendants', 'pk']:
            if prop not in loc_type:
                raise LocationConsistencyError("Missing an organization level property!")
        pk = loc_type['pk']
        if not _is_fake_pk(pk):
            pks.append(loc_type['pk'])
        loc_type['name'] = loc_type['name'].strip()
        payload_loc_type_name_by_pk[loc_type['pk']] = loc_type['name']
        if loc_type.get('code'):
            payload_loc_type_code_by_pk[loc_type['pk']] = loc_type['code']

    # Reject duplicate names or (non-empty) codes across the payload.
    names = list(payload_loc_type_name_by_pk.values())
    names_are_unique = len(names) == len(set(names))
    codes = list(payload_loc_type_code_by_pk.values())
    codes_are_unique = len(codes) == len(set(codes))
    if not names_are_unique or not codes_are_unique:
        raise LocationConsistencyError("'name' and 'code' are supposed to be unique")

    if not unique_name_and_code():
        messages.error(request, LocationConsistencyError(_(
            "Looks like you are assigning a location name/code to a different location "
            "in the same request. Please do this in two separate updates by using a "
            "temporary name to free up the name/code to be re-assigned."))
        )
        return self.get(request, *args, **kwargs)

    hierarchy = self.get_hierarchy(loc_types)

    if not self.remove_old_location_types(pks):
        return self.get(request, *args, **kwargs)

    for loc_type in hierarchy:
        # make all locations in order
        mk_loctype(**loc_type)

    for loc_type in hierarchy:
        # apply sync boundaries (expand_from, expand_to and include_without_expanding) after the
        # locations are all created since there are dependencies between them
        self._attach_sync_boundaries_to_location_type(loc_type, sql_loc_types)

    return HttpResponseRedirect(reverse(self.urlname, args=[self.domain]))
def section_url(self):
    """URL of the migration section for this domain."""
    return reverse(MigrationView.urlname, args=[self.domain])
def section_url(self):
    """URL of the report-builder type-selection page."""
    return reverse(ReportBuilderTypeSelect.urlname, args=(self.domain, ))
def default(request, domain):
    """The locations landing page is the locations list."""
    url = reverse(LocationsListView.urlname, args=[domain])
    return HttpResponseRedirect(url)
def page_context(self):
    """Template context: the location filter form and the count endpoint URL."""
    filter_form = LocationFilterForm(
        self.request.GET, domain=self.domain, user=self.request.couch_user)
    return {
        'form': filter_form,
        'locations_count_url': reverse('locations_count', args=[self.domain]),
    }
def page_url(self):
    """URL of this page for the current domain."""
    return reverse(self.urlname, args=[self.domain])
def get_custom_url(cls, domain):
    """URL of the BETS diagnosis-and-notification repeater view for `domain`."""
    from custom.enikshay.integrations.bets.views import BETSDiagnosisAndNotificationRepeaterView
    return reverse(BETSDiagnosisAndNotificationRepeaterView.urlname, args=(domain, ))