def get_entity_children(request, parent_uuid=None):
    """ JSON list of the children of the entity matching `parent_uuid` """
    parent = Entity.get_or_none(parent_uuid)
    children = []
    for child in parent.get_children():
        # re-fetch each child by uuid before serializing, as the
        # original implementation did
        children.append(Entity.get_or_none(child.uuid).to_dict())
    return HttpResponse(json.dumps(children),
                        content_type='application/json')
def get_entity_children(request, parent_uuid=None):
    """ JSON array of serialized children for the entity at `parent_uuid` """
    parent = Entity.get_or_none(parent_uuid)
    payload = [Entity.get_or_none(child.uuid).to_dict()
               for child in parent.get_children()]
    return HttpResponse(json.dumps(payload),
                        content_type='application/json')
def import_entity(self, eid, as_parent=False):
    """Mirror a DHIS OrgUnit JSON export (``<eid>.json``) as an Entity.

    Imports the parent chain recursively so the FK is satisfied.
    Returns the existing or newly-created Entity, or None when the
    unit is skipped (wrong level or unwanted group).
    Raises ValueError when the type or parent cannot be resolved.
    """
    logger.info("Importing #{}".format(eid))
    entity = Entity.get_by_id(eid)
    if entity:
        logger.debug("Already in DB.")
        return entity

    # FIX: close the JSON file deterministically (was a bare open()
    # inside json.load, leaking the file handle).
    with open(os.path.join(self.json_folder,
                           "{}.json".format(eid))) as jsf:
        jsd = json.load(jsf)

    if self.enforced_level is not None \
            and self.enforced_level < jsd.get('level'):
        logger.warning("Exiting. not at proper level")
        return

    all_groups = [oug['name']
                  for oug in jsd.get('organisationUnitGroups', [])]
    groups = [group for group in all_groups
              if group in Entity.TYPES.values()]
    if not groups:
        if 'DPS' in all_groups:
            groups.append("Division Provinciale de la Santé")
        else:
            logger.warning("Requested an unwanted entity (bad group)")
            return

    # FIX: the previous `[... ][0]` raised IndexError when nothing
    # matched, which made the `etype is None` guard unreachable;
    # next(..., None) restores the intended ValueError path.
    etype = next((k for k, v in Entity.TYPES.items()
                  if v == groups[0]), None)
    if etype is None:
        raise ValueError("Unable to guess type")

    if jsd.get('parent'):
        parent_id = jsd.get('parent').get('id')
        parent = Entity.get_by_id(parent_id)
        if parent is None:
            # recurse to create the parent first
            parent = self.import_entity(parent_id)
        if parent is None:
            raise ValueError("Unable to retrieve parent")
    else:
        parent = None

    entity = Entity.objects.create(
        uuid=jsd.get('uuid'),
        code=jsd.get('code'),
        name=jsd.get('name'),
        short_name=jsd.get('shortName'),
        display_name=jsd.get('displayName'),
        dhis_level=jsd.get('level'),
        dhis_id=jsd.get('id'),
        etype=etype,
        parent=parent)
    return entity
def upload_guide(request, *args, **kwargs):
    """ upload-guide page: root entity and its direct children (provinces) """
    context = {'page': 'upload'}
    context['root'] = Entity.get_root()
    context['provinces'] = Entity.get_root().get_children()
    template = kwargs.get('template_name', 'upload_guide.html')
    return render(request, template, context)
def import_entity(self, eid, as_parent=False):
    """Import one DHIS OrgUnit from its JSON dump into the Entity table.

    Recursively imports missing parents. Returns the Entity (existing
    or created), or None when skipped (wrong level / unwanted group).
    Raises ValueError when the type or parent cannot be resolved.
    """
    logger.info("Importing #{}".format(eid))
    entity = Entity.get_by_id(eid)
    if entity:
        logger.debug("Already in DB.")
        return entity

    # FIX: use a context manager so the JSON file is closed (previously
    # a bare open() passed to json.load leaked the handle).
    with open(os.path.join(self.json_folder,
                           "{}.json".format(eid))) as jsf:
        jsd = json.load(jsf)

    if self.enforced_level is not None \
            and self.enforced_level < jsd.get('level'):
        logger.warning("Exiting. not at proper level")
        return

    all_groups = [oug['name']
                  for oug in jsd.get('organisationUnitGroups', [])]
    groups = [group for group in all_groups
              if group in Entity.TYPES.values()]
    if not groups:
        if 'DPS' in all_groups:
            groups.append("Division Provinciale de la Santé")
        else:
            logger.warning("Requested an unwanted entity (bad group)")
            return

    # FIX: `[...][0]` used to raise IndexError on no match, making the
    # `etype is None` branch dead code; next(..., None) makes the
    # "Unable to guess type" error reachable as intended.
    etype = next((k for k, v in Entity.TYPES.items()
                  if v == groups[0]), None)
    if etype is None:
        raise ValueError("Unable to guess type")

    if jsd.get('parent'):
        parent_id = jsd.get('parent').get('id')
        parent = Entity.get_by_id(parent_id)
        if parent is None:
            # parent must exist before the child row is created
            parent = self.import_entity(parent_id)
        if parent is None:
            raise ValueError("Unable to retrieve parent")
    else:
        parent = None

    entity = Entity.objects.create(
        uuid=jsd.get('uuid'),
        code=jsd.get('code'),
        name=jsd.get('name'),
        short_name=jsd.get('shortName'),
        display_name=jsd.get('displayName'),
        dhis_level=jsd.get('level'),
        dhis_id=jsd.get('id'),
        etype=etype,
        parent=parent)
    return entity
def dashboard(request, entity_uuid=None, indicator_slug=None, period_str=None, *args, **kwargs): context = {'page': 'dashboard'} # entity context.update(process_entity_filter(request, entity_uuid)) root = context['entity'] if context['entity'] else Entity.get_root() context.update(process_period_filter(request, period_str, 'period')) if context['period'] is None: context['period'] = MonthPeriod.current().previous() all_indicators = Indicator.get_all_sorted() # Indicator.get_all_routine() indicator = Indicator.get_or_none(indicator_slug) context.update({ 'root': root, 'completeness': OrderedDict([ (child, get_cached_data('completeness', dps=child, period=context['period'], indicator=indicator)) for child in root.get_children() ]), 'indicators': all_indicators, 'indicator': indicator, 'lineage': [Entity.PROVINCE] }) # totals context.update({ 'mean_completeness': numpy.mean( [e['completeness'] for e in context['completeness'].values()]), 'mean_promptness': numpy.mean( [e['promptness'] for e in context['completeness'].values()]), }) # evolution of pw_anc_receiving_sp3 pwsp3 = get_timed_records(Indicator.get_by_number(59), root, context['all_periods']) perioda = context['all_periods'][0] periodb = context['all_periods'][-1] context.update({ 'sp3_title': "{num} : {name} entre {pa} et {pb}" .format(num=pwsp3['indicator'].number, name=pwsp3['indicator'].name, pa=perioda.strid, pb=periodb.strid), 'sp3_fname': "palu-evol-sp3-_{pa}_{pb}" .format(pa=perioda.strid, pb=periodb.strid), 'sp3_categories': [p[1].name for p in pwsp3['periods']], 'sp3_series': [{'name': pwsp3['indicator'].name, 'data': pwsp3['points']} ], }) return render(request, kwargs.get('template_name', 'dashboard.html'), context)
def children_geojson(request, parent_uuid):
    """ GeoJSON of an entity's children; JSON null on unknown UUID """
    parent = Entity.get_or_none(parent_uuid)
    data = None if parent is None else parent.children_geojson
    return JsonResponse(data, safe=False)
def single_geojson(request, entity_uuid):
    """ GeoJSON of a single entity; JSON null on unknown UUID """
    entity = Entity.get_or_none(entity_uuid)
    data = entity.geojson if entity is not None else None
    return JsonResponse(data, safe=False)
def get_entity_detail(request, entity_uuid=None):
    """ entity serialized to a dict, as JSON; null on unknown UUID """
    entity = Entity.get_or_none(entity_uuid)
    if entity is None:
        return JsonResponse(None, safe=False)
    return JsonResponse(entity.to_dict(), safe=False)
def handle(self, *args, **options):
    """ management command: generate the XLSX data-entry tool for one DPS """
    chdir_dmd()  # make sure we're at project root

    dps_name = options.get('dps')
    if not dps_name:
        logger.error("Unable to match DPS with name `{}`".format(dps_name))
        return 1

    dps = Entity.lookup_at(parent=Entity.get_root(), name=dps_name)[0]
    if dps is None:
        logger.error("Unable to match DPS with name `{}`".format(dps_name))
        return 1

    logger.info("Generating XLS dataentry tool for `{}`".format(dps_name))
    generate_dataentry_for(dps, 'dataentry.xlsx')
def png_map_for(request, perioda_str, periodb_str,
                entity_name, indicator_number,
                with_title=True, with_legend=True):
    """Serve (generating on first request) the PNG map for an entity,
    a period interval and an indicator.

    When all three filters are None, serves a blank "initial.png".
    Raises Http404 on unknown entity, empty period interval or
    unknown indicator.
    """
    entity = Entity.get_by_short_name(entity_name)
    if entity is None:
        raise Http404(
            _("Unknown entity with name `{u}`").format(u=entity_name))

    if perioda_str is None and periodb_str is None \
            and indicator_number is None:
        # no filter at all: placeholder image, no title
        periods = None
        indicator = None
        with_title = False
        fname = "initial.png"
    else:
        # NOTE: with_title parameter is overridden here — the caller's
        # value only matters for the blank-map branch above.
        with_title = True
        perioda = process_period_filter(request, perioda_str,
                                        'perioda').get('perioda')
        periodb = process_period_filter(request, periodb_str,
                                        'periodb').get('periodb')
        periods = MonthPeriod.all_from(perioda, periodb)
        if not len(periods):
            raise Http404(
                _("Unknown period interval `{pa}` `{pb}`").format(
                    pa=perioda_str, pb=periodb_str))

        indicator = Indicator.get_by_number(indicator_number)
        if indicator is None:
            raise Http404(
                _("Unknown indicator `{s}`").format(s=indicator_number))

        fname = fname_for(entity, periods, indicator)

    fpath = os.path.join('png_map', fname)
    abspath = os.path.join(settings.EXPORT_REPOSITORY, fpath)

    # generate and cache the PNG on first access
    if not os.path.exists(abspath):
        try:
            gen_map_for(entity, periods, indicator,
                        save_as=abspath,
                        with_title=with_title,
                        with_index=with_title)
        except IOError:
            logger.error("Missing map png folder in exports.")
            raise

    # return redirect('export', fpath=fpath)
    return serve_exported_files(request, fpath=fpath)
def map(request, *args, **kwargs):
    """ map page: root entity, all periods to date, and the DPS list """
    context = {'page': 'map'}
    root = Entity.get_root()
    context['root'] = root
    context['periods'] = MonthPeriod.all_till_now()
    context['dps_list'] = root.get_children()
    return render(request,
                  kwargs.get('template_name', 'map.html'),
                  context)
def handle(self, *args, **options):
    """Load entity geometries from a GeoJSON file into the database.

    DPS features are matched by exact name; ZS features by their DPS
    and upper-cased name.
    """
    # make sure we're at project root
    chdir_dmd()

    if not os.path.exists(options.get('file')):
        # FIX: message typo ("does not exit." -> "does not exist.")
        logger.error("GeoJSON file does not exist.")
        return False

    with open(options.get('file'), 'r') as f:
        gjson = json.load(f)

    rdc = Entity.get_root()

    for feature in gjson['features']:
        dps_name = feature['properties'].get('NOM_DPS')
        if dps_name:
            # DPS feature: direct match on name
            name = dps_name
            logger.debug(name)
            entity = Entity.objects.get(name=name)
        else:
            # ZS feature: resolve the parent DPS first
            zs_name = feature['properties'].get('NAME')
            dps_name = feature['properties'].get('DPS')
            logger.debug("dps: {d} - zs: {z}"
                         .format(d=dps_name, z=zs_name))
            parent = Entity.find_by_stdname(parent=rdc, std_name=dps_name)
            logger.debug("\tparent: {p}".format(p=parent))
            assert parent is not None
            entity, children = Entity.lookup_at(parent=parent,
                                                name=zs_name.upper())
            assert entity is not None
        logger.info(entity)
        entity.geometry = json.dumps(feature['geometry'])
        entity.save()
    logger.info("done.")
def handle(self, *args, **options):
    """Import geometries from a GeoJSON file onto matching Entity rows.

    DPS features carry NOM_DPS; ZS features carry NAME + DPS and are
    looked up under their parent DPS.
    """
    # make sure we're at project root
    chdir_dmd()

    if not os.path.exists(options.get('file')):
        # FIX: message typo ("does not exit." -> "does not exist.")
        logger.error("GeoJSON file does not exist.")
        return False

    with open(options.get('file'), 'r') as f:
        gjson = json.load(f)

    rdc = Entity.get_root()

    for feature in gjson['features']:
        dps_name = feature['properties'].get('NOM_DPS')
        if dps_name:
            # DPS feature: matched by exact name
            name = dps_name
            logger.debug(name)
            entity = Entity.objects.get(name=name)
        else:
            # ZS feature: resolve the parent DPS, then the zone itself
            zs_name = feature['properties'].get('NAME')
            dps_name = feature['properties'].get('DPS')
            logger.debug("dps: {d} - zs: {z}".format(d=dps_name,
                                                     z=zs_name))
            parent = Entity.find_by_stdname(parent=rdc, std_name=dps_name)
            logger.debug("\tparent: {p}".format(p=parent))
            assert parent is not None
            entity, children = Entity.lookup_at(parent=parent,
                                                name=zs_name.upper())
            assert entity is not None
        logger.info(entity)
        entity.geometry = json.dumps(feature['geometry'])
        entity.save()
    logger.info("done.")
def handle(self, *args, **options):
    """ generate the XLSX data-entry workbook for the DPS named in options """
    # make sure we're at project root
    chdir_dmd()

    wanted = options.get('dps')
    if not wanted:
        logger.error("Unable to match DPS with name `{}`"
                     .format(wanted))
        return 1

    country = Entity.get_root()
    matched = Entity.lookup_at(parent=country, name=wanted)[0]
    if matched is None:
        logger.error("Unable to match DPS with name `{}`"
                     .format(wanted))
        return 1

    logger.info("Generating XLS dataentry tool for `{}`"
                .format(wanted))
    generate_dataentry_for(matched, 'dataentry.xlsx')
def png_map_for(request, perioda_str, periodb_str,
                entity_name, indicator_number,
                with_title=True, with_legend=True):
    """Serve a cached (or freshly generated) PNG map.

    With all three filters None a blank "initial.png" is served.
    Raises Http404 for unknown entity/period interval/indicator.
    """
    entity = Entity.get_by_short_name(entity_name)
    if entity is None:
        raise Http404(_("Unknown entity with name `{u}`")
                      .format(u=entity_name))

    if perioda_str is None and periodb_str is None \
            and indicator_number is None:
        # no filters: serve the untitled placeholder map
        periods = None
        indicator = None
        with_title = False
        fname = "initial.png"
    else:
        # NOTE: the caller's with_title is overridden on this branch
        with_title = True
        perioda = process_period_filter(
            request, perioda_str, 'perioda').get('perioda')
        periodb = process_period_filter(
            request, periodb_str, 'periodb').get('periodb')
        periods = MonthPeriod.all_from(perioda, periodb)
        if not len(periods):
            raise Http404(_("Unknown period interval `{pa}` `{pb}`")
                          .format(pa=perioda_str, pb=periodb_str))

        indicator = Indicator.get_by_number(indicator_number)
        if indicator is None:
            raise Http404(_("Unknown indicator `{s}`")
                          .format(s=indicator_number))

        fname = fname_for(entity, periods, indicator)

    fpath = os.path.join('png_map', fname)
    abspath = os.path.join(settings.EXPORT_REPOSITORY, fpath)

    # lazily render the PNG the first time it is requested
    if not os.path.exists(abspath):
        try:
            gen_map_for(entity, periods, indicator,
                        save_as=abspath,
                        with_title=with_title,
                        with_index=with_title)
        except IOError:
            logger.error("Missing map png folder in exports.")
            raise

    # return redirect('export', fpath=fpath)
    return serve_exported_files(request, fpath=fpath)
def json_data_record_for(request, period_str, entity_uuid, indicator_slug):
    """ one DataRecord as JSON for (period, entity, indicator).

    404 when any of the three parts cannot be resolved. """
    entity = Entity.get_or_none(entity_uuid)
    if entity is None:
        raise Http404(_("Unknown entity UUID `{u}`").format(u=entity_uuid))

    period = process_period_filter(request, period_str,
                                   'period').get('period')
    if period is None:
        raise Http404(_("Unknown period `{p}`").format(p=period_str))

    indicator = Indicator.get_or_none(indicator_slug)
    if indicator is None:
        raise Http404(_("Unknown indicator `{s}`").format(s=indicator_slug))

    payload = DataRecord.get_for(period, entity, indicator)
    return JsonResponse(payload, safe=False)
def upload_guide_download(request, uuid, *args, **kwargs):
    """Stream the generated XLSX data-entry template for the DPS at `uuid`."""
    dps = Entity.get_or_none(uuid)
    if dps is None:
        # FIX: Http404 is an exception and must be raised; it was
        # previously `return`ed, handing the exception instance back
        # as the HTTP response.
        raise Http404(_("No Entity to match `{uuid}`").format(uuid=uuid))

    file_name = dataentry_fname_for(dps)
    file_content = generate_dataentry_for(dps).getvalue()

    response = HttpResponse(file_content,
                            content_type='application/'
                            'vnd.openxmlformats-officedocument'
                            '.spreadsheetml.sheet')
    response['Content-Disposition'] = 'attachment; filename="%s"' % file_name
    response['Content-Length'] = len(file_content)
    return response
def process_entity_filter(request, entity_uuid=None):
    """Resolve the entity for a view, defaulting to the level-0 root.

    Returns a template-context dict (root, entity, lineage data,
    children). Raises Http404 when `entity_uuid` matches no Entity.
    """
    root = Entity.objects.get(level=0)
    entity = Entity.get_or_none(entity_uuid) if entity_uuid else root

    if entity is None:
        # FIX: Http404 takes only a message; `request` was previously
        # passed as the first argument (siblings raise
        # Http404(message) only).
        raise Http404(_("Unable to match entity `{uuid}`")
                      .format(uuid=entity_uuid))

    return {
        'blank_uuid': "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
        'root': root,
        'entity': entity,
        'lineage_data': lineage_data_for(entity),
        # NOTE(review): Entity.lineage is passed uncalled here while
        # lineage_data_for calls Entity.lineage() — presumably the
        # template invokes it; confirm.
        'lineage': Entity.lineage,
        'children': root.get_children(),
    }
def __init__(self, *args, **kwargs):
    """Build the Partner form, grafting the editable User fields onto it."""
    instance = kwargs.get('instance')
    _fields = ('first_name', 'last_name', 'username', 'email',
               'is_staff', 'is_active')
    # pre-populate the User-owned fields from the related user record
    kwargs['initial'] = model_to_dict(instance.user, _fields) \
        if instance is not None else None
    super(PartnerForm, self).__init__(*args, **kwargs)

    # reorder: User fields first, then re-append the form's own fields
    pfields = copy.copy(self.fields)
    self.fields = fields_for_model(User, _fields)
    for k, v in pfields.items():
        self.fields.update({k: v})

    # limit and better display entities
    choices = [e.to_treed_tuple() for e in Entity.clean_tree(Entity.ZONE)]
    self.fields['upload_location'].choices = choices
    self.fields['validation_location'].choices = choices

    if instance:
        # existing accounts cannot change their username
        self.fields['username'].widget.attrs['readonly'] = True

    # remember whether the form edits an existing Partner
    self.instanciated = instance is not None
def validation(request, template_name='validation.html'):
    """Validation screen: list pending DataRecords and process the
    reviewer's validate/reject/edit decisions on POST."""
    context = {'page': 'validation'}

    # recent periods for tally suggestion
    recent_periods = [MonthPeriod.current().previous()]
    for __ in range(2):
        recent_periods.append(recent_periods[-1].previous())
    context.update({'recent_periods': recent_periods})

    now = timezone.now()
    rdc = Entity.get_root()
    validation_location = request.user.partner.validation_location

    # pending records only
    records = DataRecord.objects \
        .filter(validation_status=DataRecord.NOT_VALIDATED)
    # non-root reviewers only see their own DPS (and its children)
    if validation_location != rdc:
        other_dps = list(rdc.get_children())
        other_dps.remove(validation_location)
        records = records.exclude(entity__in=other_dps) \
            .exclude(entity__parent__in=other_dps)

    # set order
    records = records.order_by('-created_on')
    nb_total = records.count()
    # cap the page to MAX_RESULTS most recent records
    records = records[:MAX_RESULTS]

    if request.method == 'POST':
        form = ValidationForm(request.POST, records=records)
        if form.is_valid():
            counts = {
                'untouched': 0,
                'updated': 0,
                'validated': 0,
                'rejected': 0
            }
            for dr in records:
                status = form.cleaned_data.get('status-{}'.format(dr.id))
                if status == form.WAIT:
                    counts['untouched'] += 1
                    continue
                if status == 'edit':
                    # reviewer changed the values: store them, then
                    # treat the record as validated
                    numerator = form.cleaned_data.get(
                        'numerator-{}'.format(dr.id))
                    denominator = form.cleaned_data.get(
                        'denominator-{}'.format(dr.id))
                    dr.numerator = numerator
                    dr.denominator = denominator
                    dr.record_update(request.user.partner)
                    counts['updated'] += 1
                    status = form.VALIDATED
                if status in (form.VALIDATED, form.REJECTED):
                    if status == form.VALIDATED:
                        counts['validated'] += 1
                    else:
                        counts['rejected'] += 1
                    dr.record_validation(status=status,
                                         on=now,
                                         by=request.user.partner)
            messages.info(
                request,
                _("Thank You for your validations. {w} data were left "
                  "untouched, {u} were updated, {v} validated "
                  "and {r} rejected.").format(w=counts['untouched'],
                                              u=counts['updated'],
                                              v=counts['validated'],
                                              r=counts['rejected']))
            return redirect('validation')
        else:
            # django form validation errors
            pass
    else:
        form = ValidationForm(records=records)

    context.update({'form': form,
                    'records': records,
                    'nb_total': nb_total})

    return render(request, template_name, context)
def lineage_data_for(entity):
    """ one value per lineage slot: the entity's own uuid at its level,
    the ancestor uuid (or "") elsewhere """
    if entity is None:
        return []
    slots = entity.lineage_data
    result = []
    for type_slug in Entity.lineage():
        if type_slug == entity.etype:
            result.append(entity.uuid)
        else:
            result.append(slots.get(type_slug, ""))
    return result
def read_xls(filepath, partner):
    """Parse an uploaded XLSX data-entry workbook into a dict of records.

    Returns {'errors': [...], <ident>: {slug, period, entity,
    numerator, denominator}, ...}. Raises UploadPermissionDenied if
    the partner cannot upload and IncorrectExcelFile on a malformed
    workbook.
    """
    if not partner.can_upload:
        raise UploadPermissionDenied(_("{user} is not allowed to submit data")
                                     .format(user=partner))
    try:
        wb = load_workbook(filepath, data_only=True)
    except InvalidFileException:
        raise IncorrectExcelFile(_("Not a proper XLSX Template."))
    ws = wb.active
    nb_rows = len(ws.rows)
    # shorthand for reading one cell's value
    cd = lambda row, column: ws.cell(row=row, column=column).value

    # data holder
    data = {
        'errors': [],
    }

    # record now's time to compare with delays
    submited_on = timezone.now()

    def add_error(row, column=None, indicator=None, error=None, text=None):
        # accumulate a structured error entry without aborting the parse
        data['errors'].append({
            'row': row,
            'column': column,
            'indicator': Indicator.get_or_none(indicator.slug)
            if indicator else None,
            'slug': error,
            'text': text,
        })

    # retrieve and store default year/month
    default_year_addr = "=$C$4"
    default_month_addr = "=$D$4"
    try:
        default_year = int(float(cd(4, 3)))
    except:
        default_year = None
    try:
        default_month = int(float(cd(4, 4)))
    except:
        default_month = None

    for row in range(5, nb_rows + 1):
        # skip rows with no data cells at all
        row_has_data = sum([1 for x in ws.rows[row - 1][4:] if x.value])
        if not row_has_data:
            continue

        rdc = Entity.get_root()
        dps = Entity.find_by_stdname(cd(row, 1), parent=rdc)
        if dps is None:
            logger.warning("No DPS for row #{}".format(row))
            continue  # no DPS, no data
        zs = Entity.find_by_stdname(cd(row, 2), parent=dps)
        if zs is None:
            if cd(row, 2).lower().strip() != "-":
                logger.warning("No ZS for row #{}".format(row))
                continue  # no ZS, no data
            else:
                # "-" marks DPS-level data
                entity = dps
        else:
            entity = zs

        # check upload location authorization
        if partner.upload_location not in entity.get_ancestors():
            add_error(row, error='permission_denied',
                      text=_("You can't submit data for "
                             "that location ({entity})")
                      .format(entity=entity))
            continue

        # retrieve period
        year_str = cd(row, 3)
        if year_str == default_year_addr:
            year = default_year
        else:
            try:
                year = int(float(year_str))
            except:
                year = None
        month_str = cd(row, 4)
        if month_str == default_month_addr:
            month = default_month
        else:
            try:
                month = int(float(month_str))
            except:
                month = None
        if year is None or month is None:
            logger.warning("No year or month for row #{}".format(row))
            # FIX: `.format(row)` passed the value positionally against
            # the keyword placeholder {row}, raising KeyError at runtime
            add_error(row, error='incorrect_period',
                      text=_("Missing year or month at row {row}")
                      .format(row=row))
            continue
        try:
            period = MonthPeriod.get_or_create(year, month)
        except ValueError as e:
            logger.warning("Unable to retrieve period: {}".format(e))
            add_error(row, error='incorrect_period',
                      text=_("Unable to retrieve period for {y}/{m}")
                      .format(y=year, m=month))
            continue

        # indicators start at column E, in groups of 3 (num/denom/calc)
        for idx, cell in enumerate(ws.rows[2][4:]):
            if idx % 3 != 0:
                continue  # skip empty merged cols
            column = letter_to_column(cell.column)
            try:
                number = cell.value.split('-')[0].strip()
            except:
                # the header doesn't respect the format
                # better fail everything
                raise IncorrectExcelFile(_("Not a proper XLSX Template."))
            num = cd(row, column)
            denom = cd(row, column + 1)

            # skip if missing numerator
            if num is None:
                # logger.debug("No numerator for indic #{}".format(number))
                continue

            indicator = Indicator.get_by_number(number)
            # not an expected number
            if indicator is None:
                logger.warning("No indicator found at col #{}".format(column))
                add_error(row, column=cell.column,
                          error='incorrect_indicator',
                          text=_("Unable to match an Indicator at col {col}")
                          .format(col=cell.column))
                continue

            # ensure level of data submitted depending on type
            if indicator.collection_type == indicator.SURVEY:
                # data must be DPS
                if not entity.is_dps:
                    logger.warning("Data for Survey Indic on non-DPS #{}"
                                   .format(cell.coordinate))
                    add_error(row, column=cell.column,
                              error='incorect_level',
                              text=_("Survey indicator require DPS data"))
                    continue
            elif indicator.collection_type == indicator.ROUTINE:
                # data must be ZS
                if not entity.is_zs:
                    logger.warning("Data for Routine Indic on non-ZS #{}"
                                   .format(cell.coordinate))
                    add_error(row, column=cell.column,
                              error='incorect_level',
                              text=_("Routine indicator require ZS data"))
                    continue

            # check submission period for that Indicator
            if not indicator.can_submit_on(on=submited_on, period=period):
                logger.warning("{on} is not a valid submission time "
                               "for {ind} {period}"
                               .format(on=submited_on, ind=indicator,
                                       period=period))
                # NOTE(review): slug typo 'outside_submussion_delay'
                # kept as-is; consumers may depend on the exact slug
                add_error(row, column=cell.column,
                          indicator=indicator,
                          error='outside_submussion_delay',
                          text=_("{on} is outside submission period "
                                 "for Indicator #{ind} at {period}")
                          .format(on=submited_on.strftime('%d-%m-%Y'),
                                  ind=indicator.number, period=period))
                continue

            if not indicator.is_number and denom is None:
                logger.warning("No denominator for indic #{}".format(number))
                add_error(row, column=cell.column,
                          error='missing_denominator',
                          text=_("Missing denominator on "
                                 "non-number Indicator"))
                continue
            elif indicator.is_number:
                denom = 1

            try:
                num = float(num)
                denom = float(denom)
            except:
                add_error(row, column=cell.column,
                          error='incorrect_value',
                          indicator=indicator,
                          text=_("Incorrect value for numerator "
                                 "or denominator `{num} / {denom}`")
                          .format(num=num, denom=denom))
                continue

            data.update({data_ident_for(indicator, period, entity): {
                'slug': indicator.slug,
                'period': period,
                'entity': entity,
                'numerator': num,
                'denominator': denom}})
    return data
def handle(self, *args, **options):
    """Fetch DHIS analytics for the whole entity tree (country -> DPS ->
    ZS -> aire) and store records, walking down only where data exists."""
    # make sure we're at project root
    chdir_dmd()

    # options parsing
    self.debug = options.get('debug')
    update = options.get('update')
    period = MonthPeriod.get_or_none(options.get('period'))
    if period is None:
        # NOTE(review): logs `period` (always None here) instead of the
        # raw option, and does NOT return — execution continues with
        # period=None; confirm this fall-through is intended.
        logger.error(
            "Unable to match an actual period from `{}`".format(period))

    if options.get('previous', False):
        # collect up to NB_PREVIOUS_PERIODS periods walking backwards
        periods = []
        p = period
        while p > MonthPeriod.objects.all().first():
            periods.append(p)
            if len(periods) >= NB_PREVIOUS_PERIODS:
                break
            p = p.previous()
    else:
        periods = [period]

    upath = '/analytics.json'

    # slug -> (numerator DHIS id, denominator DHIS id)
    indicators = {
        i.slug: (i.dhis_numerator_id, i.dhis_denominator_id)
        for i in Indicator.get_all_dhis()
    }
    # deduplicated list of every DHIS data-element id we need
    dhis_ids = list(
        set([v[0] for v in indicators.values()] +
            [v[1] for v in indicators.values()]))

    drc = Entity.get_root()
    params = {
        'dimension': [
            'dx:{}'.format(";".join(dhis_ids)),
            'pe:{}'.format(";".join([pe.dhis_strid for pe in periods]))
        ],
        'filter': 'ou:{}'.format(drc.dhis_id),
        'displayProperty': 'NAME',
        'outputIdScheme': 'ID',
        'skipRounding': True,
    }

    logger.info(drc)
    # country level: fetch when forced (update) or when nothing stored yet
    if update or self.no_record_at(entity=drc, period=period):
        self.handle_record(get_dhis(path=upath, params=params),
                           entity=drc, periods=periods)

    for dps in drc.get_children():
        logger.info(dps)
        if not update and not self.no_record_at(entity=dps, period=period):
            continue
        dparams = copy.copy(params)
        dparams.update({'filter': 'ou:{}'.format(dps.dhis_id)})
        self.handle_record(get_dhis(path=upath, params=dparams),
                           entity=dps, periods=periods)

        # don't look for ZS if no data at DPS
        if self.no_record_at(entity=dps, period=period):
            continue

        for zs in dps.get_children():
            logger.info(zs)
            if not update and not self.no_record_at(entity=zs,
                                                    period=period):
                continue
            zparams = copy.copy(params)
            zparams.update({'filter': 'ou:{}'.format(zs.dhis_id)})
            self.handle_record(get_dhis(path=upath, params=zparams),
                               entity=zs, periods=periods)

            # don't look for ZS if no data at DPS
            if self.no_record_at(entity=zs, period=period):
                continue

            for aire in zs.get_children():
                logger.info(aire)
                if not update and not self.no_record_at(entity=aire,
                                                        period=period):
                    continue
                aparams = copy.copy(params)
                aparams.update({'filter': 'ou:{}'.format(aire.dhis_id)})
                self.handle_record(get_dhis(path=upath, params=aparams),
                                   entity=aire, periods=periods)
def generate_dataentry_for(dps, save_to=None):
    """Build the protected XLSX data-entry workbook for one DPS (or for
    all DPS when `dps` is the root entity).

    Writes the file to `save_to` when given; otherwise returns an
    in-memory StringIO holding the workbook bytes.
    """
    # root entity means one row per DPS instead of one row per ZS
    is_all_dps = dps == Entity.get_root()

    # colors
    black = 'FF000000'
    dark_gray = 'FFA6A6A6'
    light_gray = 'FFDEDEDE'
    yellow = 'F9FF00'

    # styles
    header_font = Font(name='Calibri', size=12, bold=True, italic=False,
                       vertAlign=None, underline='none', strike=False,
                       color=black)
    std_font = Font(name='Calibri', size=12, bold=False, italic=False,
                    vertAlign=None, underline='none', strike=False,
                    color=black)
    header_fill = PatternFill(fill_type=FILL_SOLID, start_color=dark_gray)
    yellow_fill = PatternFill(fill_type=FILL_SOLID, start_color=yellow)
    black_fill = PatternFill(fill_type=FILL_SOLID, start_color=black)
    odd_fill = PatternFill(fill_type=FILL_SOLID, start_color=light_gray)
    thin_black_side = Side(style='thin', color='FF000000')
    thick_black_side = Side(style='thick', color='FF000000')
    std_border = Border(
        left=thin_black_side,
        right=thin_black_side,
        top=thin_black_side,
        bottom=thin_black_side,
    )
    thick_left_border = Border(
        left=thick_black_side,
        right=thin_black_side,
        top=thin_black_side,
        bottom=thin_black_side,
    )
    thick_right_border = Border(
        right=thick_black_side,
        left=thin_black_side,
        top=thin_black_side,
        bottom=thin_black_side,
    )
    centered_alignment = Alignment(horizontal='center', vertical='center',
                                   text_rotation=0, wrap_text=False,
                                   shrink_to_fit=False, indent=0)
    left_alignment = Alignment(horizontal='left', vertical='center')
    vertical_alignment = Alignment(horizontal='left', vertical='bottom',
                                   text_rotation=90, wrap_text=True,
                                   shrink_to_fit=False, indent=0)
    number_format = '# ### ### ##0'
    protected = Protection(locked=True, hidden=False)
    unprotected = Protection(locked=False, hidden=False)
    header_style = {
        'font': header_font,
        'fill': header_fill,
        'border': std_border,
        'alignment': centered_alignment,
        'protection': protected
    }
    vheader_style = {
        'font': std_font,
        'alignment': vertical_alignment,
        'protection': protected
    }
    vheader_left_style = copy.copy(vheader_style)
    vheader_left_style.update({'border': thick_left_border})
    vheader_right_style = copy.copy(vheader_style)
    vheader_right_style.update({'border': thick_right_border})
    std_style = {
        'font': std_font,
        'border': std_border,
        'alignment': centered_alignment,
    }
    names_style = {
        'font': std_font,
        'border': std_border,
        'alignment': left_alignment,
    }

    def apply_style(target, style):
        # copy each style attribute onto the cell
        for key, value in style.items():
            setattr(target, key, value)

    # data validations
    yv = DataValidation(type="list",
                        formula1='"{}"'.format(",".join(
                            [str(y) for y in range(2014, 2025)])),
                        allow_blank=True)
    mv = DataValidation(type="list",
                        formula1='"{}"'.format(",".join(
                            [str(y) for y in range(1, 13)])),
                        allow_blank=True)
    dv = DataValidation(type="whole", operator="greaterThanOrEqual",
                        formula1='0')

    wb = Workbook()
    ws = wb.active
    ws.title = "Données"
    # sticky columns (DPS, ZS, YEAR, MONTH)
    ws.freeze_panes = ws['E5']
    ws.add_data_validation(yv)
    ws.add_data_validation(mv)
    ws.add_data_validation(dv)

    # resize row height for 0, 1
    xl_set_row_height(ws, 1, 2.2)
    xl_set_row_height(ws, 2, 2.2)
    # resize col A, B
    xl_set_col_width(ws, 1, 5.5)
    xl_set_col_width(ws, 2, 4.5)

    # write partial metadata headers
    ws.merge_cells("A3:A4")
    ws.cell("A3").value = "DPS"
    ws.merge_cells("B3:B4")
    ws.cell("B3").value = "ZS"
    ws.cell("C3").value = "ANNÉE"
    ws.cell("D3").value = "MOIS"
    indicator_column = 5
    dps_row = 5
    # zs_row = dps_row + 1

    # header style
    for sr in openpyxl.utils.cells_from_range("A3:D4"):
        for coord in sr:
            apply_style(ws.cell(coord), header_style)
    # default year/month cells are editable (yellow)
    for coord in ["C4", "D4"]:
        ws.cell(coord).fill = yellow_fill
        ws.cell(coord).protection = unprotected

    # ZS of the selected DPS
    children = [
        child for child in dps.get_children()
        if child.etype == Entity.ZONE or is_all_dps
    ]

    def std_write(row, column, value, style=std_style):
        # write a value and style it in one call
        cell = ws.cell(row=row, column=column)
        cell.value = value
        apply_style(cell, style)

    def auto_calc_for(indicator, column, row):
        # build the Excel formula for the auto-computed third column
        calculation = ""
        data = {
            'num': "${l}{r}".format(l=column_to_letter(column - 2), r=row),
            'denom': "${l}{r}".format(l=column_to_letter(column - 1),
                                      r=row),
            'coef': indicator.TYPES_COEFFICIENT.get(indicator.itype),
            'suffix': indicator.value_format.replace('{value}', '')
        }
        if indicator.itype == indicator.PROPORTION:
            calculation += "{num}/{denom}"
        else:
            try:
                calculation += "({num}*{coef})/{denom}"
            except ZeroDivisionError:
                raise
        formula = '=IF({num}<>"",IF({denom}<>"",' \
            'CONCATENATE(ROUND(' + calculation + ',2),"{suffix}")' \
            ',"?"),"?")'
        return formula.format(**data)

    # write indicator headers
    column = indicator_column
    for indicator in Indicator.get_all_sorted():
        # write top header with indic name
        row = 1
        ws.merge_cells(start_row=row, end_row=row + 1,
                       start_column=column, end_column=column + 2)
        std_write(row, column, indicator.name, vheader_style)

        # write header with indic number
        row = 3
        num_str = "{n} - {t}".format(n=indicator.number,
                                     t=indicator.verbose_collection_type)
        ws.merge_cells(start_row=row, end_row=row,
                       start_column=column, end_column=column + 2)
        std_write(row, column, num_str, header_style)
        apply_style(ws.cell(row=row, column=column + 1), header_style)

        # write sub header with NUM/DENOM
        row = 4
        if indicator.itype == Indicator.NUMBER:
            ws.merge_cells(start_row=row, end_row=row,
                           start_column=column, end_column=column + 2)
            std_write(row, column, "NOMBRE", std_style)
            for r in range(row, row + len(children) + 2):  # DPS + children
                ws.merge_cells(start_row=r, end_row=r,
                               start_column=column, end_column=column + 2)
        else:
            std_write(row, column, "NUMERAT", std_style)
            std_write(row, column + 1, "DÉNOM", std_style)
            std_write(row, column + 2, "CALC", std_style)

        row = dps_row + len(children)
        nb_rows = row if is_all_dps else row + 1

        # whether a row displays a ZS or not
        row_is_zs = lambda row: False if is_all_dps else row > dps_row

        # row-specific styles
        for r in range(1, nb_rows):
            left = ws.cell(row=r, column=column)
            right = ws.cell(row=r, column=column + 1)
            calc = ws.cell(row=r, column=column + 2)

            # apply default style
            if r >= dps_row:
                apply_style(left, std_style)
                apply_style(right, std_style)
                apply_style(calc, std_style)
                left.number_format = number_format
                right.number_format = number_format
                calc.number_format = number_format

                # write formula for auto third calc
                calc.set_explicit_value(value=auto_calc_for(
                    indicator=indicator, column=column + 2, row=r),
                    data_type=calc.TYPE_FORMULA)

            # apply even/odd style
            if r % 2 == 0:
                if column == indicator_column:
                    for c in range(1, indicator_column):
                        ws.cell(row=r, column=c).fill = odd_fill
                ws.cell(row=r, column=column).fill = odd_fill
                ws.cell(row=r, column=column + 1).fill = odd_fill
                ws.cell(row=r, column=column + 2).fill = odd_fill

            # disable cell if data not expected at ZS
            if row_is_zs(r) and indicator.collection_level != Entity.ZONE:
                left.fill = black_fill
                left.protection = protected
                right.fill = black_fill
                right.protection = protected
                calc.fill = black_fill
            elif not row_is_zs(r) \
                    and indicator.collection_type == indicator.ROUTINE:
                left.fill = black_fill
                left.protection = protected
                right.fill = black_fill
                right.protection = protected
                calc.fill = black_fill
            else:
                left.protection = unprotected
                right.protection = unprotected
            # calc cell is always protected
            calc.protection = protected

            # apply thick borders
            left.border = thick_left_border
            # right.border = thick_right_border
            calc.border = thick_right_border

        # iterate over indicator
        column += 3

    last_row = dps_row + len(children)
    # apply data validation for periods
    yv.ranges.append('C4:C{}'.format(last_row))
    mv.ranges.append('D4:D{}'.format(last_row))

    # apply positive integer validation to all cells
    last_column = indicator_column + len(Indicator.get_all_manual())
    last_letter = column_to_letter(last_column)
    dv.ranges.append('E4:{c}{r}'.format(c=last_letter, r=last_row))

    row = dps_row
    # single-DPS sheets get one extra leading row for the DPS itself
    initial_row = [] if is_all_dps else [None]

    # write names & periods
    for child in initial_row + children:
        if is_all_dps:
            dps_name = child.std_name
            zs_name = "-"
        else:
            dps_name = dps.std_name
            zs_name = child.std_name if child else "-"
        std_write(row, 1, dps_name, names_style)
        std_write(row, 2, zs_name, names_style)

        # set default value for period
        year = ws.cell(row=row, column=3)
        year.set_explicit_value(value="=$C$4",
                                data_type=year.TYPE_FORMULA)
        apply_style(year, std_style)
        year.protection = unprotected
        month = ws.cell(row=row, column=4)
        month.set_explicit_value(value="=$D$4",
                                 data_type=month.TYPE_FORMULA)
        apply_style(month, std_style)
        month.protection = unprotected
        row += 1

    # lock the sheet so only unprotected cells are editable
    ws.protection.set_password("PNLP")
    ws.protection.enable()

    if save_to:
        logger.info("saving to {}".format(save_to))
        wb.save(save_to)
        return
    stream = StringIO.StringIO()
    wb.save(stream)
    return stream
def xl_row_height(cm):
    """ xlwt height for a given height in centimeters """
    # 28.35 points per centimeter (72pt / 2.54cm)
    return 28.35 * cm


def xl_set_row_height(sheet, row, cm):
    """ change row height """
    # openpyxl only materializes a RowDimension once touched;
    # create it on demand before setting the height
    if row not in sheet.row_dimensions.keys():
        sheet.row_dimensions[row] = RowDimension(worksheet=sheet)
    sheet.row_dimensions[row].height = xl_row_height(cm)


# file name of the data-entry workbook for a DPS ("DPS" for the root entity)
dataentry_fname_for = lambda dps: "saisie-PNLP-{}.xlsx".format(
    dps.std_name if dps != Entity.get_root() else "DPS")


# NOTE(review): the definition below is cut off mid-statement in this chunk
# (duplicated content); the complete generate_dataentry_for appears later in
# the file. Tokens reproduced as-is.
def generate_dataentry_for(dps, save_to=None):
    is_all_dps = dps == Entity.get_root()

    # colors
    black = 'FF000000'
    dark_gray = 'FFA6A6A6'
    light_gray = 'FFDEDEDE'
    yellow = 'F9FF00'

    # styles
    header_font = Font(name='Calibri', size=12,
def generate_dataentry_for(dps, save_to=None):
    """ build the PNLP data-entry Excel workbook for a DPS (or for all DPS)

        Lays out one protected sheet: DPS / ZS / year / month columns,
        then three columns per indicator (NUMERAT / DENOM / CALC, or a
        single merged NOMBRE span for plain-number indicators), with
        styling, cell protection and data validation.

        :param dps: Entity whose children (ZS) get one row each; when it
                    is the root entity, one row per DPS instead.
        :param save_to: optional path; when given the workbook is saved
                        there and nothing is returned, otherwise an
                        in-memory stream holding the workbook is returned.
    """
    # root entity means one row per DPS instead of DPS + its ZS rows
    is_all_dps = dps == Entity.get_root()

    # colors
    black = 'FF000000'
    dark_gray = 'FFA6A6A6'
    light_gray = 'FFDEDEDE'
    # NOTE(review): other colors are 8-digit aRGB; 'F9FF00' lacks the
    # alpha byte — confirm intended
    yellow = 'F9FF00'

    # styles
    header_font = Font(
        name='Calibri', size=12, bold=True, italic=False,
        vertAlign=None, underline='none', strike=False, color=black)
    std_font = Font(
        name='Calibri', size=12, bold=False, italic=False,
        vertAlign=None, underline='none', strike=False, color=black)
    header_fill = PatternFill(fill_type=FILL_SOLID, start_color=dark_gray)
    yellow_fill = PatternFill(fill_type=FILL_SOLID, start_color=yellow)
    black_fill = PatternFill(fill_type=FILL_SOLID, start_color=black)
    odd_fill = PatternFill(fill_type=FILL_SOLID, start_color=light_gray)
    thin_black_side = Side(style='thin', color='FF000000')
    thick_black_side = Side(style='thick', color='FF000000')
    std_border = Border(
        left=thin_black_side, right=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,
    )
    # thick left/right borders visually group each indicator's 3 columns
    thick_left_border = Border(
        left=thick_black_side, right=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,)
    thick_right_border = Border(
        right=thick_black_side, left=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,)
    centered_alignment = Alignment(
        horizontal='center', vertical='center', text_rotation=0,
        wrap_text=False, shrink_to_fit=False, indent=0)
    left_alignment = Alignment(
        horizontal='left', vertical='center')
    # rotated 90° for the tall indicator-name header cells
    vertical_alignment = Alignment(
        horizontal='left', vertical='bottom', text_rotation=90,
        wrap_text=True, shrink_to_fit=False, indent=0)
    number_format = '# ### ### ##0'
    protected = Protection(locked=True, hidden=False)
    unprotected = Protection(locked=False, hidden=False)
    # style dicts applied attribute-by-attribute via apply_style()
    header_style = {
        'font': header_font,
        'fill': header_fill,
        'border': std_border,
        'alignment': centered_alignment,
        'protection': protected
    }
    vheader_style = {
        'font': std_font,
        'alignment': vertical_alignment,
        'protection': protected
    }
    vheader_left_style = copy.copy(vheader_style)
    vheader_left_style.update({'border': thick_left_border})
    vheader_right_style = copy.copy(vheader_style)
    vheader_right_style.update({'border': thick_right_border})
    std_style = {
        'font': std_font,
        'border': std_border,
        'alignment': centered_alignment,
    }
    names_style = {
        'font': std_font,
        'border': std_border,
        'alignment': left_alignment,
    }

    def apply_style(target, style):
        # copy each style attribute (font, fill, …) onto the cell
        for key, value in style.items():
            setattr(target, key, value)

    # data validations
    # yv: dropdown of years 2014-2024; mv: dropdown of months 1-12
    yv = DataValidation(type="list", formula1='"{}"'.format(
        ",".join([str(y) for y in range(2014, 2025)])), allow_blank=True)
    mv = DataValidation(type="list", formula1='"{}"'.format(
        ",".join([str(y) for y in range(1, 13)])), allow_blank=True)
    # dv: whole numbers >= 0 for every data cell
    dv = DataValidation(type="whole",
                        operator="greaterThanOrEqual",
                        formula1='0')

    wb = Workbook()
    ws = wb.active
    ws.title = "Données"
    # sticky columns (DPS, ZS, YEAR, MONTH)
    ws.freeze_panes = ws['E5']
    ws.add_data_validation(yv)
    ws.add_data_validation(mv)
    ws.add_data_validation(dv)
    # resize row height for 0, 1
    xl_set_row_height(ws, 1, 2.2)
    xl_set_row_height(ws, 2, 2.2)
    # resize col A, B
    xl_set_col_width(ws, 1, 5.5)
    xl_set_col_width(ws, 2, 4.5)
    # write partial metadata headers
    # NOTE(review): ws.cell("A3") coordinate-as-positional is a legacy
    # openpyxl calling convention — confirm against the pinned version
    ws.merge_cells("A3:A4")
    ws.cell("A3").value = "DPS"
    ws.merge_cells("B3:B4")
    ws.cell("B3").value = "ZS"
    ws.cell("C3").value = "ANNÉE"
    ws.cell("D3").value = "MOIS"
    # first indicator starts at column E; first data row is row 5
    indicator_column = 5
    dps_row = 5
    # zs_row = dps_row + 1
    # header style
    for sr in openpyxl.utils.cells_from_range("A3:D4"):
        for coord in sr:
            apply_style(ws.cell(coord), header_style)
    # C4/D4 are the master period cells every row defaults to
    for coord in ["C4", "D4"]:
        ws.cell(coord).fill = yellow_fill
        ws.cell(coord).protection = unprotected
    # ZS of the selected DPS
    children = [child for child in dps.get_children()
                if child.etype == Entity.ZONE or is_all_dps]

    def std_write(row, column, value, style=std_style):
        # write a value then style the cell
        cell = ws.cell(row=row, column=column)
        cell.value = value
        apply_style(cell, style)

    def auto_calc_for(indicator, column, row):
        # build the Excel formula for the CALC cell: displays
        # rounded(num/denom [*coef]) + suffix, or "?" if inputs missing
        calculation = ""
        data = {
            'num': "${l}{r}".format(l=column_to_letter(column - 2), r=row),
            'denom': "${l}{r}".format(l=column_to_letter(column - 1), r=row),
            'coef': indicator.TYPES_COEFFICIENT.get(indicator.itype),
            'suffix': indicator.value_format.replace('{value}', '')}
        if indicator.itype == indicator.PROPORTION:
            calculation += "{num}/{denom}"
        else:
            # NOTE(review): string concatenation cannot raise
            # ZeroDivisionError; this try/except is dead code
            try:
                calculation += "({num}*{coef})/{denom}"
            except ZeroDivisionError:
                raise
        formula = '=IF({num}<>"",IF({denom}<>"",' \
            'CONCATENATE(ROUND(' + calculation + ',2),"{suffix}")' \
            ',"?"),"?")'
        return formula.format(**data)

    # write indicator headers
    column = indicator_column
    for indicator in Indicator.get_all_sorted():
        # write top header with indic name
        row = 1
        ws.merge_cells(start_row=row, end_row=row + 1,
                       start_column=column, end_column=column + 2)
        std_write(row, column, indicator.name, vheader_style)
        # write header with indic number
        row = 3
        num_str = "{n} - {t}".format(n=indicator.number,
                                     t=indicator.verbose_collection_type)
        ws.merge_cells(start_row=row, end_row=row,
                       start_column=column, end_column=column + 2)
        std_write(row, column, num_str, header_style)
        apply_style(ws.cell(row=row, column=column + 1), header_style)
        # write sub header with NUM/DENOM
        row = 4
        if indicator.itype == Indicator.NUMBER:
            # plain numbers: merge the 3 columns on every row
            ws.merge_cells(start_row=row, end_row=row,
                           start_column=column, end_column=column + 2)
            std_write(row, column, "NOMBRE", std_style)
            for r in range(row, row + len(children) + 2):  # DPS + children
                ws.merge_cells(start_row=r, end_row=r,
                               start_column=column, end_column=column + 2)
        else:
            std_write(row, column, "NUMERAT", std_style)
            std_write(row, column + 1, "DÉNOM", std_style)
            std_write(row, column + 2, "CALC", std_style)
        row = dps_row + len(children)
        # one extra row for the DPS itself unless exporting all DPS
        nb_rows = row if is_all_dps else row + 1
        # whether a row displays a ZS or not
        row_is_zs = lambda row: False if is_all_dps else row > dps_row
        # row-specific styles
        for r in range(1, nb_rows):
            left = ws.cell(row=r, column=column)
            right = ws.cell(row=r, column=column + 1)
            calc = ws.cell(row=r, column=column + 2)
            # apply default style
            if r >= dps_row:
                apply_style(left, std_style)
                apply_style(right, std_style)
                apply_style(calc, std_style)
                left.number_format = number_format
                right.number_format = number_format
                calc.number_format = number_format
                # write formula for auto third calc
                calc.set_explicit_value(
                    value=auto_calc_for(indicator=indicator,
                                        column=column + 2, row=r),
                    data_type=calc.TYPE_FORMULA)
            # apply even/odd style
            if r % 2 == 0:
                # metadata columns (A-D) tinted once, by the first indicator
                if column == indicator_column:
                    for c in range(1, indicator_column):
                        ws.cell(row=r, column=c).fill = odd_fill
                ws.cell(row=r, column=column).fill = odd_fill
                ws.cell(row=r, column=column + 1).fill = odd_fill
                ws.cell(row=r, column=column + 2).fill = odd_fill
            # disable cell if data not expected at ZS
            if row_is_zs(r) and indicator.collection_level != Entity.ZONE:
                left.fill = black_fill
                left.protection = protected
                right.fill = black_fill
                right.protection = protected
                calc.fill = black_fill
            elif not row_is_zs(r) \
                    and indicator.collection_type == indicator.ROUTINE:
                # routine indicators are not entered at DPS level
                left.fill = black_fill
                left.protection = protected
                right.fill = black_fill
                right.protection = protected
                calc.fill = black_fill
            else:
                left.protection = unprotected
                right.protection = unprotected
            # calc cell is always protected
            calc.protection = protected
            # apply thick borders
            left.border = thick_left_border
            # right.border = thick_right_border
            calc.border = thick_right_border
        # iterate over indicator
        column += 3

    last_row = dps_row + len(children)
    # apply data validation for periods
    yv.ranges.append('C4:C{}'.format(last_row))
    mv.ranges.append('D4:D{}'.format(last_row))
    # apply positive integer validation to all cells
    last_column = indicator_column + len(Indicator.get_all_manual())
    last_letter = column_to_letter(last_column)
    dv.ranges.append('E4:{c}{r}'.format(c=last_letter, r=last_row))
    row = dps_row
    # when exporting a single DPS, prepend one row (None) for the DPS itself
    initial_row = [] if is_all_dps else [None]
    # write names & periods
    for child in initial_row + children:
        if is_all_dps:
            dps_name = child.std_name
            zs_name = "-"
        else:
            dps_name = dps.std_name
            zs_name = child.std_name if child else "-"
        std_write(row, 1, dps_name, names_style)
        std_write(row, 2, zs_name, names_style)
        # set default value for period
        # each row's year/month mirrors the master cells C4/D4
        year = ws.cell(row=row, column=3)
        year.set_explicit_value(value="=$C$4",
                                data_type=year.TYPE_FORMULA)
        apply_style(year, std_style)
        year.protection = unprotected
        month = ws.cell(row=row, column=4)
        month.set_explicit_value(value="=$D$4",
                                 data_type=month.TYPE_FORMULA)
        apply_style(month, std_style)
        month.protection = unprotected
        row += 1
    # lock the sheet so only unprotected cells are editable
    ws.protection.set_password("PNLP")
    ws.protection.enable()
    if save_to:
        logger.info("saving to {}".format(save_to))
        wb.save(save_to)
        return
    # NOTE(review): StringIO.StringIO is the Python 2 spelling — confirm
    # interpreter version before porting
    stream = StringIO.StringIO()
    wb.save(stream)
    return stream
def xl_row_height(cm):
    """ xlwt height for a given height in centimeters """
    # 28.35 points per centimeter (72pt / 2.54cm)
    return 28.35 * cm


def xl_set_row_height(sheet, row, cm):
    """ change row height """
    # openpyxl only materializes a RowDimension once touched;
    # create it on demand before setting the height
    if row not in sheet.row_dimensions.keys():
        sheet.row_dimensions[row] = RowDimension(worksheet=sheet)
    sheet.row_dimensions[row].height = xl_row_height(cm)


# file name of the data-entry workbook for a DPS ("DPS" for the root entity)
dataentry_fname_for = lambda dps: "saisie-PNLP-{}.xlsx".format(
    dps.std_name if dps != Entity.get_root() else "DPS")


# NOTE(review): the definition below is cut off mid-statement in this chunk
# (duplicated content); the complete generate_dataentry_for appears earlier
# in the file. Tokens reproduced as-is.
def generate_dataentry_for(dps, save_to=None):
    is_all_dps = dps == Entity.get_root()

    # colors
    black = 'FF000000'
    dark_gray = 'FFA6A6A6'
    light_gray = 'FFDEDEDE'
    yellow = 'F9FF00'

    # styles
    header_font = Font(
        name='Calibri',
def handle(self, *args, **options):
    """ rebuild the pre-computed caches used by the dashboard

        Walks every MonthPeriod x entity (x Indicator) combination and
        refreshes three cache families: `completeness`,
        `section2-arrivals` and `section2-points`.

        Cache entries for periods at least 4 months old are considered
        stable and are not regenerated when already present. """

    # make sure we're at project root
    chdir_dmd()

    root = Entity.get_root()
    periods = MonthPeriod.all_till_now()
    all_dps = root.get_children()
    all_entities = list(all_dps) + [root]
    indicators = Indicator.objects.all()
    # None stands for the "all indicators" aggregate entry
    all_indicators = list(indicators) + [None]

    def is_old(period):
        # existing cache 4 months+ old is not regenerated
        # (guarded: with fewer than 4 periods, nothing is "old" yet —
        # the unguarded periods[-4] raised IndexError)
        return len(periods) >= 4 and period <= periods[-4]

    def progress(nb_ran, nb_items):
        # single-line progress display on stdout
        # (nb_ran * 100 first so the division is never truncated to 0)
        sys.stdout.write("{}/{} - {}%\r".format(
            nb_ran, nb_items, int(nb_ran * 100 / nb_items)))
        sys.stdout.flush()

    logger.info("Updating cache for dashboard completeness...")
    nb_items = len(periods) * len(all_dps) * len(all_indicators)
    nb_ran = 0
    for period in periods:
        for dps in all_dps:
            for indicator in all_indicators:
                nb_ran += 1
                params = {
                    'dps': dps,
                    'period': period,
                    'indicator': indicator
                }
                if is_old(period) and \
                        cache_exists_for('completeness', **params):
                    continue
                update_cached_data('completeness', **params)
                progress(nb_ran, nb_items)

    logger.info("Updating cache for section2/arrivals...")
    # fixed: denominator previously used len(all_dps) although the loop
    # walks all_entities, and nb_ran was never incremented (progress 0)
    nb_items = len(periods) * len(all_entities) * len(indicators)
    nb_ran = 0
    for period in periods:
        for entity in all_entities:
            for indicator in indicators:
                nb_ran += 1
                params = {
                    'entity': entity,
                    'period': period,
                    'indicator': indicator
                }
                if is_old(period) and \
                        cache_exists_for('section2-arrivals', **params):
                    continue
                update_cached_data('section2-arrivals', **params)
                progress(nb_ran, nb_items)

    logger.info("Updating cache for section2/points")
    nb_items = len(periods) * len(all_entities)
    nb_ran = 0
    for period in periods:
        for entity in all_entities:
            nb_ran += 1
            # fixed: params was previously stale from the arrivals loop,
            # so the existence check looked at the wrong cache key
            params = {'entity': entity, 'period': period}
            if is_old(period) and \
                    cache_exists_for('section2-points', **params):
                continue
            update_cached_data('section2-points', **params)
            progress(nb_ran, nb_items)

    logger.info("done.")
def validation(request, template_name='validation.html'):
    """ review screen for NOT_VALIDATED DataRecords

        GET: shows up to MAX_RESULTS pending records (scoped to the
        user's validation location when it is not the root entity).
        POST: applies per-record decisions (wait / edit / validate /
        reject) from the ValidationForm and redirects back. """
    context = {'page': 'validation'}

    # recent periods for tally suggestion
    # last 3 closed months, most recent first
    recent_periods = [MonthPeriod.current().previous()]
    for __ in range(2):
        recent_periods.append(recent_periods[-1].previous())
    context.update({'recent_periods': recent_periods})

    now = timezone.now()
    rdc = Entity.get_root()
    validation_location = request.user.partner.validation_location

    records = DataRecord.objects \
        .filter(validation_status=DataRecord.NOT_VALIDATED)

    # non-root validators only see their own DPS (and its children):
    # exclude every other DPS and their descendants
    # NOTE(review): .remove() raises ValueError if validation_location is
    # not a direct child of root — presumably guaranteed upstream; confirm
    if validation_location != rdc:
        other_dps = list(rdc.get_children())
        other_dps.remove(validation_location)
        records = records.exclude(entity__in=other_dps) \
            .exclude(entity__parent__in=other_dps)

    # set order
    records = records.order_by('-created_on')
    # count before slicing so the template can show the real total
    nb_total = records.count()
    records = records[:MAX_RESULTS]

    if request.method == 'POST':
        form = ValidationForm(request.POST, records=records)
        if form.is_valid():
            counts = {
                'untouched': 0,
                'updated': 0,
                'validated': 0,
                'rejected': 0
            }
            for dr in records:
                # per-record decision comes from a dynamic form field
                status = form.cleaned_data.get('status-{}'.format(dr.id))
                if status == form.WAIT:
                    counts['untouched'] += 1
                    continue
                if status == 'edit':
                    # apply corrected values, then fall through to
                    # validate the edited record
                    numerator = form.cleaned_data.get('numerator-{}'
                                                      .format(dr.id))
                    denominator = form.cleaned_data.get('denominator-{}'
                                                        .format(dr.id))
                    dr.numerator = numerator
                    dr.denominator = denominator
                    dr.record_update(request.user.partner)
                    counts['updated'] += 1
                    status = form.VALIDATED
                if status in (form.VALIDATED, form.REJECTED):
                    if status == form.VALIDATED:
                        counts['validated'] += 1
                    else:
                        counts['rejected'] += 1
                    dr.record_validation(
                        status=status,
                        on=now,
                        by=request.user.partner)
            messages.info(request, _(
                "Thank You for your validations. {w} data were left "
                "untouched, {u} were updated, {v} validated "
                "and {r} rejected.").format(
                w=counts['untouched'],
                u=counts['updated'],
                v=counts['validated'],
                r=counts['rejected']))
            # redirect so a refresh does not re-submit the form
            return redirect('validation')
        else:
            # django form validation errors
            pass
    else:
        form = ValidationForm(records=records)

    context.update({
        'form': form,
        'records': records,
        'nb_total': nb_total
    })

    return render(request, template_name, context)
def handle(self, *args, **options):
    """ fetch indicator values from DHIS and record them locally

        Walks the entity tree (root -> DPS -> ZS -> AS) and, for each
        entity missing records at `period` (or every entity when
        --update is set), queries the DHIS analytics API and passes the
        payload to self.handle_record(). Sub-levels are skipped when
        the parent level yielded no data. """

    # make sure we're at project root
    chdir_dmd()

    # options parsing
    self.debug = options.get('debug')
    update = options.get('update')
    period = MonthPeriod.get_or_none(options.get('period'))
    if period is None:
        # fixed: previously logged the already-None parsed value and
        # fell through, crashing later on period comparisons
        logger.error("Unable to match an actual period from `{}`"
                     .format(options.get('period')))
        return

    if options.get('previous', False):
        # walk back in time from `period`, capped at NB_PREVIOUS_PERIODS
        periods = []
        p = period
        while p > MonthPeriod.objects.all().first():
            periods.append(p)
            if len(periods) >= NB_PREVIOUS_PERIODS:
                break
            p = p.previous()
    else:
        periods = [period]

    upath = '/analytics.json'
    # slug -> (numerator dhis id, denominator dhis id)
    indicators = {i.slug: (i.dhis_numerator_id, i.dhis_denominator_id)
                  for i in Indicator.get_all_dhis()}
    dhis_ids = list(set([v[0] for v in indicators.values()] +
                        [v[1] for v in indicators.values()]))

    drc = Entity.get_root()

    # base analytics query; `filter` is overridden per entity below
    params = {
        'dimension': ['dx:{}'.format(";".join(dhis_ids)),
                      'pe:{}'.format(
                          ";".join([pe.dhis_strid for pe in periods]))],
        'filter': 'ou:{}'.format(drc.dhis_id),
        'displayProperty': 'NAME',
        'outputIdScheme': 'ID',
        'skipRounding': True,
    }

    logger.info(drc)
    if update or self.no_record_at(entity=drc, period=period):
        self.handle_record(get_dhis(path=upath, params=params),
                           entity=drc, periods=periods)

    for dps in drc.get_children():
        logger.info(dps)
        # skip entities that already have records, unless --update
        if not update and not self.no_record_at(entity=dps,
                                                period=period):
            continue
        dparams = copy.copy(params)
        dparams.update({'filter': 'ou:{}'.format(dps.dhis_id)})
        self.handle_record(get_dhis(path=upath, params=dparams),
                           entity=dps, periods=periods)

        # don't look for ZS if no data at DPS
        if self.no_record_at(entity=dps, period=period):
            continue

        for zs in dps.get_children():
            logger.info(zs)
            if not update and not self.no_record_at(entity=zs,
                                                    period=period):
                continue
            zparams = copy.copy(params)
            zparams.update({'filter': 'ou:{}'.format(zs.dhis_id)})
            self.handle_record(get_dhis(path=upath, params=zparams),
                               entity=zs, periods=periods)

            # don't look for AS if no data at ZS
            if self.no_record_at(entity=zs, period=period):
                continue

            for aire in zs.get_children():
                logger.info(aire)
                if not update and not self.no_record_at(entity=aire,
                                                        period=period):
                    continue
                aparams = copy.copy(params)
                aparams.update({'filter': 'ou:{}'.format(aire.dhis_id)})
                self.handle_record(get_dhis(path=upath, params=aparams),
                                   entity=aire, periods=periods)
def dashboard(request, entity_uuid=None, indicator_slug=None,
              period_str=None, *args, **kwargs):
    """ main dashboard view

        Shows per-DPS completeness/promptness for the selected period
        and indicator, plus the evolution chart of indicator #59
        (pw_anc_receiving_sp3) over the available periods. """
    context = {'page': 'dashboard'}

    # resolve entity (falls back to root) and period (falls back to the
    # last closed month)
    context.update(process_entity_filter(request, entity_uuid))
    root = context['entity'] or Entity.get_root()
    context.update(process_period_filter(request, period_str, 'period'))
    if context['period'] is None:
        context['period'] = MonthPeriod.current().previous()

    indicator = Indicator.get_or_none(indicator_slug)

    # cached completeness entry per DPS, keeping children order
    completeness = OrderedDict()
    for child in root.get_children():
        completeness[child] = get_cached_data(
            'completeness', dps=child,
            period=context['period'], indicator=indicator)

    context.update({
        'root': root,
        'completeness': completeness,
        'indicators': Indicator.get_all_sorted(),
        'indicator': indicator,
        'lineage': [Entity.PROVINCE]
    })

    # totals
    entries = context['completeness'].values()
    context.update({
        'mean_completeness': numpy.mean(
            [e['completeness'] for e in entries]),
        'mean_promptness': numpy.mean(
            [e['promptness'] for e in entries]),
    })

    # evolution of pw_anc_receiving_sp3
    # 'all_periods' is populated by process_period_filter — TODO confirm
    pwsp3 = get_timed_records(Indicator.get_by_number(59),
                              root, context['all_periods'])
    perioda = context['all_periods'][0]
    periodb = context['all_periods'][-1]
    context.update({
        'sp3_title': "{num} : {name} entre {pa} et {pb}".format(
            num=pwsp3['indicator'].number,
            name=pwsp3['indicator'].name,
            pa=perioda.strid,
            pb=periodb.strid),
        'sp3_fname': "palu-evol-sp3-_{pa}_{pb}".format(
            pa=perioda.strid, pb=periodb.strid),
        'sp3_categories': [p[1].name for p in pwsp3['periods']],
        'sp3_series': [{
            'name': pwsp3['indicator'].name,
            'data': pwsp3['points']
        }],
    })

    return render(request,
                  kwargs.get('template_name', 'dashboard.html'),
                  context)
def handle(self, *args, **options):
    """ rebuild the pre-computed caches used by the dashboard

        Walks every MonthPeriod x entity (x Indicator) combination and
        refreshes three cache families: `completeness`,
        `section2-arrivals` and `section2-points`.

        Cache entries for periods at least 4 months old are considered
        stable and are not regenerated when already present. """

    # make sure we're at project root
    chdir_dmd()

    root = Entity.get_root()
    periods = MonthPeriod.all_till_now()
    all_dps = root.get_children()
    all_entities = list(all_dps) + [root]
    indicators = Indicator.objects.all()
    # None stands for the "all indicators" aggregate entry
    all_indicators = list(indicators) + [None]

    def is_old(period):
        # existing cache 4 months+ old is not regenerated
        # (guarded: with fewer than 4 periods, nothing is "old" yet —
        # the unguarded periods[-4] raised IndexError)
        return len(periods) >= 4 and period <= periods[-4]

    def progress(nb_ran, nb_items):
        # single-line progress display on stdout
        # (nb_ran * 100 first so the division is never truncated to 0)
        sys.stdout.write("{}/{} - {}%\r".format(
            nb_ran, nb_items, int(nb_ran * 100 / nb_items)))
        sys.stdout.flush()

    logger.info("Updating cache for dashboard completeness...")
    nb_items = len(periods) * len(all_dps) * len(all_indicators)
    nb_ran = 0
    for period in periods:
        for dps in all_dps:
            for indicator in all_indicators:
                nb_ran += 1
                params = {
                    'dps': dps,
                    'period': period,
                    'indicator': indicator
                }
                if is_old(period) and \
                        cache_exists_for('completeness', **params):
                    continue
                update_cached_data('completeness', **params)
                progress(nb_ran, nb_items)

    logger.info("Updating cache for section2/arrivals...")
    # fixed: denominator previously used len(all_dps) although the loop
    # walks all_entities, and nb_ran was never incremented (progress 0)
    nb_items = len(periods) * len(all_entities) * len(indicators)
    nb_ran = 0
    for period in periods:
        for entity in all_entities:
            for indicator in indicators:
                nb_ran += 1
                params = {
                    'entity': entity,
                    'period': period,
                    'indicator': indicator
                }
                if is_old(period) and \
                        cache_exists_for('section2-arrivals', **params):
                    continue
                update_cached_data('section2-arrivals', **params)
                progress(nb_ran, nb_items)

    logger.info("Updating cache for section2/points")
    nb_items = len(periods) * len(all_entities)
    nb_ran = 0
    for period in periods:
        for entity in all_entities:
            nb_ran += 1
            # fixed: params was previously stale from the arrivals loop,
            # so the existence check looked at the wrong cache key
            params = {'entity': entity, 'period': period}
            if is_old(period) and \
                    cache_exists_for('section2-points', **params):
                continue
            update_cached_data('section2-points', **params)
            progress(nb_ran, nb_items)

    logger.info("done.")