def dashboard(request, entity_uuid=None, indicator_slug=None,
              period_str=None, *args, **kwargs):
    """Main dashboard view: completeness per DPS plus SP3 evolution graph."""
    context = {'page': 'dashboard'}

    # entity: fall back to the root entity when no filter matched
    context.update(process_entity_filter(request, entity_uuid))
    root = context['entity'] if context['entity'] else Entity.get_root()

    # period: default to the month before the current one
    context.update(process_period_filter(request, period_str, 'period'))
    if context['period'] is None:
        context['period'] = MonthPeriod.current().previous()

    all_indicators = Indicator.get_all_sorted()  # Indicator.get_all_routine()
    indicator = Indicator.get_or_none(indicator_slug)

    completeness = OrderedDict(
        [(child, get_cached_data('completeness',
                                 dps=child,
                                 period=context['period'],
                                 indicator=indicator))
         for child in root.get_children()])
    context.update({
        'root': root,
        'completeness': completeness,
        'indicators': all_indicators,
        'indicator': indicator,
        'lineage': [Entity.PROVINCE],
    })

    # totals across all DPS
    dps_values = list(context['completeness'].values())
    context.update({
        'mean_completeness': numpy.mean(
            [e['completeness'] for e in dps_values]),
        'mean_promptness': numpy.mean(
            [e['promptness'] for e in dps_values]),
    })

    # evolution of pw_anc_receiving_sp3 (indicator #59)
    # NOTE(review): assumes 'all_periods' was placed in context by
    # process_period_filter — confirm against that helper.
    pwsp3 = get_timed_records(Indicator.get_by_number(59),
                              root, context['all_periods'])
    first_period = context['all_periods'][0]
    last_period = context['all_periods'][-1]
    context.update({
        'sp3_title': "{num} : {name} entre {pa} et {pb}".format(
            num=pwsp3['indicator'].number,
            name=pwsp3['indicator'].name,
            pa=first_period.strid,
            pb=last_period.strid),
        'sp3_fname': "palu-evol-sp3-_{pa}_{pb}".format(
            pa=first_period.strid, pb=last_period.strid),
        'sp3_categories': [p[1].name for p in pwsp3['periods']],
        'sp3_series': [{'name': pwsp3['indicator'].name,
                        'data': pwsp3['points']}],
    })

    return render(request,
                  kwargs.get('template_name', 'dashboard.html'),
                  context)
def indicator_edit(request, slug, *args, **kwargs):
    """Display and process the edit form for a single Indicator.

    GET renders the form bound to the Indicator matching *slug*;
    POST validates and saves it atomically, then redirects to the
    indicators list. Invalid POST data falls through and re-renders
    the bound form so field errors are displayed.
    """
    context = {'page': 'indicators'}
    indicator = Indicator.get_or_none(slug)
    if request.method == 'POST':
        form = IndicatorForm(request.POST, instance=indicator)
        if form.is_valid():
            # save inside a transaction so related writes are all-or-nothing
            with transaction.atomic():
                form.save()
            messages.success(
                request,
                _("Indicator “{name}” has been updated.").format(
                    name=indicator))
            return redirect('indicators')
        else:
            # django form validation errors (removed redundant `pass`)
            logger.debug("django form errors")
    else:
        form = IndicatorForm(instance=indicator)
    context.update({'form': form, 'indicator': indicator})
    return render(request,
                  kwargs.get('template_name', 'indicator_edit.html'),
                  context)
def indicator_edit(request, slug, *args, **kwargs):
    """Edit view for the Indicator identified by *slug* (GET form / POST save)."""
    context = {'page': 'indicators'}
    indicator = Indicator.get_or_none(slug)
    is_post = request.method == 'POST'
    if is_post:
        form = IndicatorForm(request.POST, instance=indicator)
        if form.is_valid():
            with transaction.atomic():
                form.save()
            messages.success(request,
                             _("Indicator “{name}” has been updated.")
                             .format(name=indicator))
            return redirect('indicators')
        else:
            # invalid input: log and re-render bound form below
            logger.debug("django form errors")
            pass
    else:
        form = IndicatorForm(instance=indicator)
    context.update({'form': form, 'indicator': indicator})
    template = kwargs.get('template_name', 'indicator_edit.html')
    return render(request, template, context)
def add_error(row, column=None, indicator=None, error=None, text=None):
    # Record one spreadsheet-level error into the enclosing `data` dict
    # (closure over the upload parser's accumulator).
    entry = {
        'row': row,
        'column': column,
        'indicator': (Indicator.get_or_none(indicator.slug)
                      if indicator else None),
        'slug': error,
        'text': text,
    }
    data['errors'].append(entry)
def png_map_for(request, perioda_str, periodb_str, entity_name,
                indicator_number, with_title=True, with_legend=True):
    """Serve (generating on first request) the PNG map for an entity,
    a period interval and an indicator.

    NOTE(review): `with_title` is overwritten in both branches below and
    `with_legend` is never read — both parameters are effectively ignored.
    """
    entity = Entity.get_by_short_name(entity_name)
    if entity is None:
        raise Http404(
            _("Unknown entity with name `{u}`").format(u=entity_name))

    no_filters = (perioda_str is None and periodb_str is None
                  and indicator_number is None)
    if no_filters:
        # blank placeholder map
        periods = None
        indicator = None
        with_title = False
        fname = "initial.png"
    else:
        with_title = True
        perioda = process_period_filter(
            request, perioda_str, 'perioda').get('perioda')
        periodb = process_period_filter(
            request, periodb_str, 'periodb').get('periodb')
        periods = MonthPeriod.all_from(perioda, periodb)
        if not len(periods):
            raise Http404(
                _("Unknown period interval `{pa}` `{pb}`").format(
                    pa=perioda_str, pb=periodb_str))
        indicator = Indicator.get_by_number(indicator_number)
        if indicator is None:
            raise Http404(
                _("Unknown indicator `{s}`").format(s=indicator_number))
        fname = fname_for(entity, periods, indicator)

    fpath = os.path.join('png_map', fname)
    abspath = os.path.join(settings.EXPORT_REPOSITORY, fpath)
    # generate only when not already cached on disk
    if not os.path.exists(abspath):
        try:
            gen_map_for(entity, periods, indicator,
                        save_as=abspath,
                        with_title=with_title,
                        with_index=with_title)
        except IOError:
            logger.error("Missing map png folder in exports.")
            raise
    # return redirect('export', fpath=fpath)
    return serve_exported_files(request, fpath=fpath)
def handle_record(self, jsdata, entity, periods):
    """Turn one DHIS analytics payload into DataRecord rows for *entity*.

    Skips indicator/period pairs whose numerator (or, for non-number
    indicators, denominator) is absent or the DHIS missing marker '0.0'.
    """
    logger.info(periods)
    missing = '0.0'
    data = {}
    # index payload rows by (dhis indicator id, dhis period id)
    indic_data = {(indic_id, pid): val
                  for indic_id, pid, val in jsdata['rows']}
    for period in periods:
        pid = period.dhis_strid
        for indicator in Indicator.get_all_dhis():
            numerator = indic_data.get((indicator.dhis_numerator_id, pid))
            denominator = indic_data.get(
                (indicator.dhis_denominator_id, pid))
            if numerator is None or numerator == missing:
                logger.error("Missing numerator `{}` for `{}`".format(
                    indicator.dhis_numerator_id, indicator))
                continue
            if not indicator.is_number and (denominator is None
                                            or denominator == missing):
                logger.error("Missing denominator `{}` for `{}`".format(
                    indicator.dhis_denominator_id, indicator))
                continue
            logger.debug(data_ident_for(indicator, period, entity))
            data[data_ident_for(indicator, period, entity)] = {
                'slug': indicator.slug,
                'period': period,
                'entity': entity,
                'numerator': numerator,
                'denominator': denominator,
            }
    d = DataRecord.batch_create(data, dhisbot,
                                source=DataRecord.DHIS,
                                arrival_status=DataRecord.ARRIVED,
                                auto_validate=True)
    if self.debug:
        pp(d)
    return d
def png_map_for(request, perioda_str, periodb_str, entity_name,
                indicator_number, with_title=True, with_legend=True):
    """Serve the exported PNG map for entity/periods/indicator,
    generating and caching it on disk on first access."""
    entity = Entity.get_by_short_name(entity_name)
    if entity is None:
        raise Http404(_("Unknown entity with name `{u}`")
                      .format(u=entity_name))

    if perioda_str is None and periodb_str is None \
            and indicator_number is None:
        # no filters at all: serve the static placeholder
        periods, indicator = None, None
        with_title = False
        fname = "initial.png"
    else:
        with_title = True
        perioda = process_period_filter(request, perioda_str,
                                        'perioda').get('perioda')
        periodb = process_period_filter(request, periodb_str,
                                        'periodb').get('periodb')
        periods = MonthPeriod.all_from(perioda, periodb)
        if not len(periods):
            raise Http404(_("Unknown period interval `{pa}` `{pb}`")
                          .format(pa=perioda_str, pb=periodb_str))
        indicator = Indicator.get_by_number(indicator_number)
        if indicator is None:
            raise Http404(_("Unknown indicator `{s}`")
                          .format(s=indicator_number))
        fname = fname_for(entity, periods, indicator)

    fpath = os.path.join('png_map', fname)
    abspath = os.path.join(settings.EXPORT_REPOSITORY, fpath)
    if not os.path.exists(abspath):
        # not cached yet: render it now
        try:
            gen_map_for(entity, periods, indicator, save_as=abspath,
                        with_title=with_title, with_index=with_title)
        except IOError:
            logger.error("Missing map png folder in exports.")
            raise
    # return redirect('export', fpath=fpath)
    return serve_exported_files(request, fpath=fpath)
def json_data_record_for(request, period_str, entity_uuid, indicator_slug):
    """Return the DataRecord payload for (period, entity, indicator) as JSON.

    Raises Http404 for any of the three lookups failing, checked in the
    order entity, period, indicator.
    """
    entity = Entity.get_or_none(entity_uuid)
    if entity is None:
        raise Http404(_("Unknown entity UUID `{u}`").format(u=entity_uuid))

    period = process_period_filter(request, period_str, 'period').get('period')
    if period is None:
        raise Http404(_("Unknown period `{p}`").format(p=period_str))

    indicator = Indicator.get_or_none(indicator_slug)
    if indicator is None:
        raise Http404(_("Unknown indicator `{s}`").format(s=indicator_slug))

    payload = DataRecord.get_for(period, entity, indicator)
    # safe=False: payload may be a non-dict (e.g. a list)
    return JsonResponse(payload, safe=False)
def handle_record(self, jsdata, entity, periods):
    """Build and batch-create DataRecords from a DHIS analytics response."""
    logger.info(periods)
    missing = '0.0'
    data = {}
    # map (indicator dhis id, period dhis id) -> raw value
    indic_data = {(indic_id, pid): val
                  for indic_id, pid, val in jsdata['rows']}
    for period in periods:
        pid = period.dhis_strid
        for indicator in Indicator.get_all_dhis():
            num = indic_data.get((indicator.dhis_numerator_id, pid))
            denom = indic_data.get((indicator.dhis_denominator_id, pid))
            # numerator is mandatory for every indicator
            if num is None or num == missing:
                logger.error("Missing numerator `{}` for `{}`"
                             .format(indicator.dhis_numerator_id, indicator))
                continue
            # denominator only matters for ratio-type indicators
            if not indicator.is_number and (denom is None
                                            or denom == missing):
                logger.error("Missing denominator `{}` for `{}`"
                             .format(indicator.dhis_denominator_id,
                                     indicator))
                continue
            logger.debug(data_ident_for(indicator, period, entity))
            data.update({data_ident_for(indicator, period, entity): {
                'slug': indicator.slug,
                'period': period,
                'entity': entity,
                'numerator': num,
                'denominator': denom}})
    created = DataRecord.batch_create(data, dhisbot,
                                      source=DataRecord.DHIS,
                                      arrival_status=DataRecord.ARRIVED,
                                      auto_validate=True)
    if self.debug:
        pp(created)
    return created
def avg_arrival_for(entity, year, month):
    """Sum report-arrival counters over all routine indicators for
    *entity* at (year, month) and derive completeness/promptness ratios.

    Fix: the original keys list contained 'nb_prompt_reports' twice;
    the duplicate was a harmless no-op (same value recomputed), so
    removing it preserves behavior.
    """
    keys = ['nb_expected_reports', 'nb_arrived_reports', 'nb_prompt_reports']
    data = {key: 0 for key in keys}
    for indicator in Indicator.get_all_routine():
        idata = agg_arrival_for(indicator, entity, year, month)
        data.update({key: data.get(key) + idata.get(key) for key in keys})
    # NOTE(review): raises ZeroDivisionError when nb_expected_reports is 0;
    # appears to assume at least one expected report — confirm with callers.
    data.update({
        'completeness':
            data['nb_arrived_reports'] / data['nb_expected_reports'],
        'promptness':
            data['nb_prompt_reports'] / data['nb_expected_reports'],
    })
    return data
def avg_arrival_for(entity, year, month):
    """Aggregate arrival counters across routine indicators and compute
    'completeness' and 'promptness' ratios against expected reports.

    Fix: dropped the duplicated 'nb_prompt_reports' entry from the keys
    list (it caused the same update to be computed twice — a no-op, so
    this change is behavior-preserving).
    """
    keys = [
        'nb_expected_reports',
        'nb_arrived_reports',
        'nb_prompt_reports',
    ]
    data = {key: 0 for key in keys}
    for indicator in Indicator.get_all_routine():
        idata = agg_arrival_for(indicator, entity, year, month)
        data.update({key: data.get(key) + idata.get(key) for key in keys})
    # NOTE(review): will raise ZeroDivisionError if no reports are
    # expected for this entity/month — confirm that cannot happen.
    data.update({
        'completeness':
            data['nb_arrived_reports'] / data['nb_expected_reports'],
        'promptness':
            data['nb_prompt_reports'] / data['nb_expected_reports'],
    })
    return data
def handle(self, *args, **options):
    """Management-command entry point: pull DHIS analytics data for the
    whole entity tree (root -> DPS -> ZS -> aires) for one or more periods.

    Fixes:
    - when the `period` option cannot be matched, the original logged the
      already-None `period` variable (message always showed `None`) and
      then kept running, crashing later on the None period. We now log the
      raw option value and abort.
    - corrected copy-pasted comments at the ZS level.
    """
    # make sure we're at project root
    chdir_dmd()

    # options parsing
    self.debug = options.get('debug')
    update = options.get('update')
    period = MonthPeriod.get_or_none(options.get('period'))
    if period is None:
        logger.error(
            "Unable to match an actual period from `{}`".format(
                options.get('period')))
        return

    if options.get('previous', False):
        # walk back up to NB_PREVIOUS_PERIODS months, never before the
        # earliest known period
        periods = []
        p = period
        while p > MonthPeriod.objects.all().first():
            periods.append(p)
            if len(periods) >= NB_PREVIOUS_PERIODS:
                break
            p = p.previous()
    else:
        periods = [period]

    upath = '/analytics.json'
    indicators = {
        i.slug: (i.dhis_numerator_id, i.dhis_denominator_id)
        for i in Indicator.get_all_dhis()
    }
    # every DHIS data-element id we need, deduplicated
    dhis_ids = list(
        set([v[0] for v in indicators.values()] +
            [v[1] for v in indicators.values()]))
    drc = Entity.get_root()
    params = {
        'dimension': [
            'dx:{}'.format(";".join(dhis_ids)),
            'pe:{}'.format(";".join([pe.dhis_strid for pe in periods]))
        ],
        'filter': 'ou:{}'.format(drc.dhis_id),
        'displayProperty': 'NAME',
        'outputIdScheme': 'ID',
        'skipRounding': True,
    }

    logger.info(drc)
    if update or self.no_record_at(entity=drc, period=period):
        self.handle_record(get_dhis(path=upath, params=params),
                           entity=drc, periods=periods)

    for dps in drc.get_children():
        logger.info(dps)
        if not update and not self.no_record_at(entity=dps, period=period):
            continue
        dparams = copy.copy(params)
        dparams.update({'filter': 'ou:{}'.format(dps.dhis_id)})
        self.handle_record(get_dhis(path=upath, params=dparams),
                           entity=dps, periods=periods)

        # don't look for ZS if no data at DPS
        if self.no_record_at(entity=dps, period=period):
            continue

        for zs in dps.get_children():
            logger.info(zs)
            if not update and not self.no_record_at(entity=zs,
                                                    period=period):
                continue
            zparams = copy.copy(params)
            zparams.update({'filter': 'ou:{}'.format(zs.dhis_id)})
            self.handle_record(get_dhis(path=upath, params=zparams),
                               entity=zs, periods=periods)

            # don't look for aires if no data at ZS
            if self.no_record_at(entity=zs, period=period):
                continue

            for aire in zs.get_children():
                logger.info(aire)
                if not update and not self.no_record_at(entity=aire,
                                                        period=period):
                    continue
                aparams = copy.copy(params)
                aparams.update({'filter': 'ou:{}'.format(aire.dhis_id)})
                self.handle_record(get_dhis(path=upath, params=aparams),
                                   entity=aire, periods=periods)
def generate_dataentry_for(dps, save_to=None):
    """Build the XLSX data-entry template for *dps* (or for all DPS when
    *dps* is the root entity).

    Returns a StringIO with the workbook bytes, or writes to *save_to*
    and returns None when a path is given.

    Fixes:
    - removed a dead ``try/except ZeroDivisionError`` around a string
      concatenation in the formula builder (string += can never raise it);
    - replaced the ``row_is_zs`` lambda assignment with a ``def`` (PEP 8).
    """
    is_all_dps = dps == Entity.get_root()

    # colors (ARGB; NOTE(review): `yellow` lacks the FF alpha prefix the
    # others use — openpyxl accepts both, confirm intended)
    black = 'FF000000'
    dark_gray = 'FFA6A6A6'
    light_gray = 'FFDEDEDE'
    yellow = 'F9FF00'

    # styles
    header_font = Font(name='Calibri', size=12, bold=True, italic=False,
                       vertAlign=None, underline='none', strike=False,
                       color=black)
    std_font = Font(name='Calibri', size=12, bold=False, italic=False,
                    vertAlign=None, underline='none', strike=False,
                    color=black)
    header_fill = PatternFill(fill_type=FILL_SOLID, start_color=dark_gray)
    yellow_fill = PatternFill(fill_type=FILL_SOLID, start_color=yellow)
    black_fill = PatternFill(fill_type=FILL_SOLID, start_color=black)
    odd_fill = PatternFill(fill_type=FILL_SOLID, start_color=light_gray)
    thin_black_side = Side(style='thin', color='FF000000')
    thick_black_side = Side(style='thick', color='FF000000')
    std_border = Border(
        left=thin_black_side, right=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,
    )
    thick_left_border = Border(
        left=thick_black_side, right=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,
    )
    thick_right_border = Border(
        right=thick_black_side, left=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,
    )
    centered_alignment = Alignment(horizontal='center', vertical='center',
                                   text_rotation=0, wrap_text=False,
                                   shrink_to_fit=False, indent=0)
    left_alignment = Alignment(horizontal='left', vertical='center')
    vertical_alignment = Alignment(horizontal='left', vertical='bottom',
                                   text_rotation=90, wrap_text=True,
                                   shrink_to_fit=False, indent=0)
    number_format = '# ### ### ##0'
    protected = Protection(locked=True, hidden=False)
    unprotected = Protection(locked=False, hidden=False)
    header_style = {
        'font': header_font,
        'fill': header_fill,
        'border': std_border,
        'alignment': centered_alignment,
        'protection': protected,
    }
    vheader_style = {
        'font': std_font,
        'alignment': vertical_alignment,
        'protection': protected,
    }
    # NOTE(review): the two variants below are currently unused
    vheader_left_style = copy.copy(vheader_style)
    vheader_left_style.update({'border': thick_left_border})
    vheader_right_style = copy.copy(vheader_style)
    vheader_right_style.update({'border': thick_right_border})
    std_style = {
        'font': std_font,
        'border': std_border,
        'alignment': centered_alignment,
    }
    names_style = {
        'font': std_font,
        'border': std_border,
        'alignment': left_alignment,
    }

    def apply_style(target, style):
        # copy each style attribute onto the cell
        for key, value in style.items():
            setattr(target, key, value)

    # data validations: year list, month list, non-negative integers
    yv = DataValidation(type="list", formula1='"{}"'.format(",".join(
        [str(y) for y in range(2014, 2025)])), allow_blank=True)
    mv = DataValidation(type="list", formula1='"{}"'.format(",".join(
        [str(y) for y in range(1, 13)])), allow_blank=True)
    dv = DataValidation(type="whole", operator="greaterThanOrEqual",
                        formula1='0')

    wb = Workbook()
    ws = wb.active
    ws.title = "Données"
    # sticky columns (DPS, ZS, YEAR, MONTH)
    ws.freeze_panes = ws['E5']
    ws.add_data_validation(yv)
    ws.add_data_validation(mv)
    ws.add_data_validation(dv)

    # resize row height for 0, 1
    xl_set_row_height(ws, 1, 2.2)
    xl_set_row_height(ws, 2, 2.2)
    # resize col A, B
    xl_set_col_width(ws, 1, 5.5)
    xl_set_col_width(ws, 2, 4.5)

    # write partial metadata headers
    ws.merge_cells("A3:A4")
    ws.cell("A3").value = "DPS"
    ws.merge_cells("B3:B4")
    ws.cell("B3").value = "ZS"
    ws.cell("C3").value = "ANNÉE"
    ws.cell("D3").value = "MOIS"
    indicator_column = 5
    dps_row = 5
    # zs_row = dps_row + 1

    # header style
    for sr in openpyxl.utils.cells_from_range("A3:D4"):
        for coord in sr:
            apply_style(ws.cell(coord), header_style)
    for coord in ["C4", "D4"]:
        ws.cell(coord).fill = yellow_fill
        ws.cell(coord).protection = unprotected

    # ZS of the selected DPS (all children when generating for root)
    children = [
        child for child in dps.get_children()
        if child.etype == Entity.ZONE or is_all_dps
    ]

    def std_write(row, column, value, style=std_style):
        cell = ws.cell(row=row, column=column)
        cell.value = value
        apply_style(cell, style)

    def auto_calc_for(indicator, column, row):
        # build the Excel formula computing the indicator value from its
        # numerator/denominator cells on the same row
        data = {
            'num': "${l}{r}".format(l=column_to_letter(column - 2), r=row),
            'denom': "${l}{r}".format(l=column_to_letter(column - 1), r=row),
            'coef': indicator.TYPES_COEFFICIENT.get(indicator.itype),
            'suffix': indicator.value_format.replace('{value}', ''),
        }
        if indicator.itype == indicator.PROPORTION:
            calculation = "{num}/{denom}"
        else:
            calculation = "({num}*{coef})/{denom}"
        formula = '=IF({num}<>"",IF({denom}<>"",' \
            'CONCATENATE(ROUND(' + calculation + ',2),"{suffix}")' \
            ',"?"),"?")'
        return formula.format(**data)

    # write indicator headers
    column = indicator_column
    for indicator in Indicator.get_all_sorted():
        # write top header with indic name
        row = 1
        ws.merge_cells(start_row=row, end_row=row + 1,
                       start_column=column, end_column=column + 2)
        std_write(row, column, indicator.name, vheader_style)
        # write header with indic number
        row = 3
        num_str = "{n} - {t}".format(n=indicator.number,
                                     t=indicator.verbose_collection_type)
        ws.merge_cells(start_row=row, end_row=row,
                       start_column=column, end_column=column + 2)
        std_write(row, column, num_str, header_style)
        apply_style(ws.cell(row=row, column=column + 1), header_style)
        # write sub header with NUM/DENOM
        row = 4
        if indicator.itype == Indicator.NUMBER:
            ws.merge_cells(start_row=row, end_row=row,
                           start_column=column, end_column=column + 2)
            std_write(row, column, "NOMBRE", std_style)
            for r in range(row, row + len(children) + 2):  # DPS + children
                ws.merge_cells(start_row=r, end_row=r,
                               start_column=column, end_column=column + 2)
        else:
            std_write(row, column, "NUMERAT", std_style)
            std_write(row, column + 1, "DÉNOM", std_style)
            std_write(row, column + 2, "CALC", std_style)

        row = dps_row + len(children)
        nb_rows = row if is_all_dps else row + 1

        def row_is_zs(row):
            # whether a row displays a ZS or not
            return False if is_all_dps else row > dps_row

        # row-specific styles
        for r in range(1, nb_rows):
            left = ws.cell(row=r, column=column)
            right = ws.cell(row=r, column=column + 1)
            calc = ws.cell(row=r, column=column + 2)
            # apply default style
            if r >= dps_row:
                apply_style(left, std_style)
                apply_style(right, std_style)
                apply_style(calc, std_style)
                left.number_format = number_format
                right.number_format = number_format
                calc.number_format = number_format
                # write formula for auto third calc
                calc.set_explicit_value(
                    value=auto_calc_for(indicator=indicator,
                                        column=column + 2, row=r),
                    data_type=calc.TYPE_FORMULA)
            # apply even/odd style
            if r % 2 == 0:
                if column == indicator_column:
                    for c in range(1, indicator_column):
                        ws.cell(row=r, column=c).fill = odd_fill
                ws.cell(row=r, column=column).fill = odd_fill
                ws.cell(row=r, column=column + 1).fill = odd_fill
                ws.cell(row=r, column=column + 2).fill = odd_fill
            # disable cell if data not expected at ZS
            if row_is_zs(r) and indicator.collection_level != Entity.ZONE:
                left.fill = black_fill
                left.protection = protected
                right.fill = black_fill
                right.protection = protected
                calc.fill = black_fill
            elif not row_is_zs(r) \
                    and indicator.collection_type == indicator.ROUTINE:
                left.fill = black_fill
                left.protection = protected
                right.fill = black_fill
                right.protection = protected
                calc.fill = black_fill
            else:
                left.protection = unprotected
                right.protection = unprotected
            # calc cell is always protected
            calc.protection = protected
            # apply thick borders
            left.border = thick_left_border
            # right.border = thick_right_border
            calc.border = thick_right_border

        # iterate over indicator
        column += 3

    last_row = dps_row + len(children)
    # apply data validation for periods
    yv.ranges.append('C4:C{}'.format(last_row))
    mv.ranges.append('D4:D{}'.format(last_row))
    # apply positive integer validation to all cells
    last_column = indicator_column + len(Indicator.get_all_manual())
    last_letter = column_to_letter(last_column)
    dv.ranges.append('E4:{c}{r}'.format(c=last_letter, r=last_row))

    row = dps_row
    initial_row = [] if is_all_dps else [None]
    # write names & periods
    for child in initial_row + children:
        if is_all_dps:
            dps_name = child.std_name
            zs_name = "-"
        else:
            dps_name = dps.std_name
            zs_name = child.std_name if child else "-"
        std_write(row, 1, dps_name, names_style)
        std_write(row, 2, zs_name, names_style)
        # set default value for period
        year = ws.cell(row=row, column=3)
        year.set_explicit_value(value="=$C$4",
                                data_type=year.TYPE_FORMULA)
        apply_style(year, std_style)
        year.protection = unprotected
        month = ws.cell(row=row, column=4)
        month.set_explicit_value(value="=$D$4",
                                 data_type=month.TYPE_FORMULA)
        apply_style(month, std_style)
        month.protection = unprotected
        row += 1

    ws.protection.set_password("PNLP")
    ws.protection.enable()

    if save_to:
        logger.info("saving to {}".format(save_to))
        wb.save(save_to)
        return
    stream = StringIO.StringIO()
    wb.save(stream)
    return stream
def generate_dataentry_for(dps, save_to=None):
    """Generate the XLSX data-entry workbook for one DPS, or for every
    DPS when *dps* is the root entity.

    Writes to *save_to* when given (returns None), otherwise returns a
    StringIO holding the workbook.

    Fixes:
    - dropped the dead ``try/except ZeroDivisionError`` wrapping a string
      concatenation in ``auto_calc_for`` (unreachable exception);
    - replaced the ``row_is_zs`` lambda assignment with a ``def``.
    """
    is_all_dps = dps == Entity.get_root()

    # colors (ARGB)
    black = 'FF000000'
    dark_gray = 'FFA6A6A6'
    light_gray = 'FFDEDEDE'
    yellow = 'F9FF00'

    # styles
    header_font = Font(
        name='Calibri', size=12, bold=True, italic=False,
        vertAlign=None, underline='none', strike=False, color=black)
    std_font = Font(
        name='Calibri', size=12, bold=False, italic=False,
        vertAlign=None, underline='none', strike=False, color=black)
    header_fill = PatternFill(fill_type=FILL_SOLID, start_color=dark_gray)
    yellow_fill = PatternFill(fill_type=FILL_SOLID, start_color=yellow)
    black_fill = PatternFill(fill_type=FILL_SOLID, start_color=black)
    odd_fill = PatternFill(fill_type=FILL_SOLID, start_color=light_gray)
    thin_black_side = Side(style='thin', color='FF000000')
    thick_black_side = Side(style='thick', color='FF000000')
    std_border = Border(
        left=thin_black_side, right=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,)
    thick_left_border = Border(
        left=thick_black_side, right=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,)
    thick_right_border = Border(
        right=thick_black_side, left=thin_black_side,
        top=thin_black_side, bottom=thin_black_side,)
    centered_alignment = Alignment(
        horizontal='center', vertical='center', text_rotation=0,
        wrap_text=False, shrink_to_fit=False, indent=0)
    left_alignment = Alignment(
        horizontal='left', vertical='center')
    vertical_alignment = Alignment(
        horizontal='left', vertical='bottom', text_rotation=90,
        wrap_text=True, shrink_to_fit=False, indent=0)
    number_format = '# ### ### ##0'
    protected = Protection(locked=True, hidden=False)
    unprotected = Protection(locked=False, hidden=False)
    header_style = {
        'font': header_font,
        'fill': header_fill,
        'border': std_border,
        'alignment': centered_alignment,
        'protection': protected,
    }
    vheader_style = {
        'font': std_font,
        'alignment': vertical_alignment,
        'protection': protected,
    }
    # NOTE(review): left/right vheader variants are currently unused
    vheader_left_style = copy.copy(vheader_style)
    vheader_left_style.update({'border': thick_left_border})
    vheader_right_style = copy.copy(vheader_style)
    vheader_right_style.update({'border': thick_right_border})
    std_style = {
        'font': std_font,
        'border': std_border,
        'alignment': centered_alignment,
    }
    names_style = {
        'font': std_font,
        'border': std_border,
        'alignment': left_alignment,
    }

    def apply_style(target, style):
        # assign every style attribute onto the target cell
        for key, value in style.items():
            setattr(target, key, value)

    # data validations: year list, month list, non-negative integers
    yv = DataValidation(type="list", formula1='"{}"'.format(
        ",".join([str(y) for y in range(2014, 2025)])), allow_blank=True)
    mv = DataValidation(type="list", formula1='"{}"'.format(
        ",".join([str(y) for y in range(1, 13)])), allow_blank=True)
    dv = DataValidation(type="whole", operator="greaterThanOrEqual",
                        formula1='0')

    wb = Workbook()
    ws = wb.active
    ws.title = "Données"
    # sticky columns (DPS, ZS, YEAR, MONTH)
    ws.freeze_panes = ws['E5']
    ws.add_data_validation(yv)
    ws.add_data_validation(mv)
    ws.add_data_validation(dv)

    # resize row height for 0, 1
    xl_set_row_height(ws, 1, 2.2)
    xl_set_row_height(ws, 2, 2.2)
    # resize col A, B
    xl_set_col_width(ws, 1, 5.5)
    xl_set_col_width(ws, 2, 4.5)

    # write partial metadata headers
    ws.merge_cells("A3:A4")
    ws.cell("A3").value = "DPS"
    ws.merge_cells("B3:B4")
    ws.cell("B3").value = "ZS"
    ws.cell("C3").value = "ANNÉE"
    ws.cell("D3").value = "MOIS"
    indicator_column = 5
    dps_row = 5
    # zs_row = dps_row + 1

    # header style
    for sr in openpyxl.utils.cells_from_range("A3:D4"):
        for coord in sr:
            apply_style(ws.cell(coord), header_style)
    for coord in ["C4", "D4"]:
        ws.cell(coord).fill = yellow_fill
        ws.cell(coord).protection = unprotected

    # ZS of the selected DPS (all children when generating for root)
    children = [child for child in dps.get_children()
                if child.etype == Entity.ZONE or is_all_dps]

    def std_write(row, column, value, style=std_style):
        cell = ws.cell(row=row, column=column)
        cell.value = value
        apply_style(cell, style)

    def auto_calc_for(indicator, column, row):
        # Excel formula computing the indicator from num/denom cells
        data = {
            'num': "${l}{r}".format(l=column_to_letter(column - 2), r=row),
            'denom': "${l}{r}".format(l=column_to_letter(column - 1),
                                      r=row),
            'coef': indicator.TYPES_COEFFICIENT.get(indicator.itype),
            'suffix': indicator.value_format.replace('{value}', '')}
        if indicator.itype == indicator.PROPORTION:
            calculation = "{num}/{denom}"
        else:
            calculation = "({num}*{coef})/{denom}"
        formula = '=IF({num}<>"",IF({denom}<>"",' \
            'CONCATENATE(ROUND(' + calculation + ',2),"{suffix}")' \
            ',"?"),"?")'
        return formula.format(**data)

    # write indicator headers
    column = indicator_column
    for indicator in Indicator.get_all_sorted():
        # write top header with indic name
        row = 1
        ws.merge_cells(start_row=row, end_row=row + 1,
                       start_column=column, end_column=column + 2)
        std_write(row, column, indicator.name, vheader_style)
        # write header with indic number
        row = 3
        num_str = "{n} - {t}".format(n=indicator.number,
                                     t=indicator.verbose_collection_type)
        ws.merge_cells(start_row=row, end_row=row,
                       start_column=column, end_column=column + 2)
        std_write(row, column, num_str, header_style)
        apply_style(ws.cell(row=row, column=column + 1), header_style)
        # write sub header with NUM/DENOM
        row = 4
        if indicator.itype == Indicator.NUMBER:
            ws.merge_cells(start_row=row, end_row=row,
                           start_column=column, end_column=column + 2)
            std_write(row, column, "NOMBRE", std_style)
            for r in range(row, row + len(children) + 2):  # DPS + children
                ws.merge_cells(start_row=r, end_row=r,
                               start_column=column, end_column=column + 2)
        else:
            std_write(row, column, "NUMERAT", std_style)
            std_write(row, column + 1, "DÉNOM", std_style)
            std_write(row, column + 2, "CALC", std_style)

        row = dps_row + len(children)
        nb_rows = row if is_all_dps else row + 1

        def row_is_zs(row):
            # whether a row displays a ZS or not
            return False if is_all_dps else row > dps_row

        # row-specific styles
        for r in range(1, nb_rows):
            left = ws.cell(row=r, column=column)
            right = ws.cell(row=r, column=column + 1)
            calc = ws.cell(row=r, column=column + 2)
            # apply default style
            if r >= dps_row:
                apply_style(left, std_style)
                apply_style(right, std_style)
                apply_style(calc, std_style)
                left.number_format = number_format
                right.number_format = number_format
                calc.number_format = number_format
                # write formula for auto third calc
                calc.set_explicit_value(
                    value=auto_calc_for(indicator=indicator,
                                        column=column + 2, row=r),
                    data_type=calc.TYPE_FORMULA)
            # apply even/odd style
            if r % 2 == 0:
                if column == indicator_column:
                    for c in range(1, indicator_column):
                        ws.cell(row=r, column=c).fill = odd_fill
                ws.cell(row=r, column=column).fill = odd_fill
                ws.cell(row=r, column=column + 1).fill = odd_fill
                ws.cell(row=r, column=column + 2).fill = odd_fill
            # disable cell if data not expected at ZS
            if row_is_zs(r) and indicator.collection_level != Entity.ZONE:
                left.fill = black_fill
                left.protection = protected
                right.fill = black_fill
                right.protection = protected
                calc.fill = black_fill
            elif not row_is_zs(r) \
                    and indicator.collection_type == indicator.ROUTINE:
                left.fill = black_fill
                left.protection = protected
                right.fill = black_fill
                right.protection = protected
                calc.fill = black_fill
            else:
                left.protection = unprotected
                right.protection = unprotected
            # calc cell is always protected
            calc.protection = protected
            # apply thick borders
            left.border = thick_left_border
            # right.border = thick_right_border
            calc.border = thick_right_border

        # iterate over indicator
        column += 3

    last_row = dps_row + len(children)
    # apply data validation for periods
    yv.ranges.append('C4:C{}'.format(last_row))
    mv.ranges.append('D4:D{}'.format(last_row))
    # apply positive integer validation to all cells
    last_column = indicator_column + len(Indicator.get_all_manual())
    last_letter = column_to_letter(last_column)
    dv.ranges.append('E4:{c}{r}'.format(c=last_letter, r=last_row))

    row = dps_row
    initial_row = [] if is_all_dps else [None]
    # write names & periods
    for child in initial_row + children:
        if is_all_dps:
            dps_name = child.std_name
            zs_name = "-"
        else:
            dps_name = dps.std_name
            zs_name = child.std_name if child else "-"
        std_write(row, 1, dps_name, names_style)
        std_write(row, 2, zs_name, names_style)
        # set default value for period
        year = ws.cell(row=row, column=3)
        year.set_explicit_value(value="=$C$4",
                                data_type=year.TYPE_FORMULA)
        apply_style(year, std_style)
        year.protection = unprotected
        month = ws.cell(row=row, column=4)
        month.set_explicit_value(value="=$D$4",
                                 data_type=month.TYPE_FORMULA)
        apply_style(month, std_style)
        month.protection = unprotected
        row += 1

    ws.protection.set_password("PNLP")
    ws.protection.enable()

    if save_to:
        logger.info("saving to {}".format(save_to))
        wb.save(save_to)
        return
    stream = StringIO.StringIO()
    wb.save(stream)
    return stream
def dashboard(request, entity_uuid=None, indicator_slug=None,
              period_str=None, *args, **kwargs):
    """Dashboard: per-DPS completeness table, totals, and the evolution
    chart for indicator #59 (pw_anc_receiving_sp3)."""
    context = {'page': 'dashboard'}

    # entity
    context.update(process_entity_filter(request, entity_uuid))
    root = context['entity'] if context['entity'] else Entity.get_root()

    # period (defaults to previous month)
    context.update(process_period_filter(request, period_str, 'period'))
    if context['period'] is None:
        context['period'] = MonthPeriod.current().previous()

    all_indicators = Indicator.get_all_sorted()  # Indicator.get_all_routine()
    indicator = Indicator.get_or_none(indicator_slug)

    context.update({
        'root': root,
        'completeness': OrderedDict(
            [(child, get_cached_data('completeness', dps=child,
                                     period=context['period'],
                                     indicator=indicator))
             for child in root.get_children()]),
        'indicators': all_indicators,
        'indicator': indicator,
        'lineage': [Entity.PROVINCE],
    })

    # totals
    entries = context['completeness'].values()
    context.update({
        'mean_completeness':
            numpy.mean([e['completeness'] for e in entries]),
        'mean_promptness':
            numpy.mean([e['promptness'] for e in entries]),
    })

    # evolution of pw_anc_receiving_sp3
    pwsp3 = get_timed_records(Indicator.get_by_number(59),
                              root, context['all_periods'])
    perioda = context['all_periods'][0]
    periodb = context['all_periods'][-1]
    context.update({
        'sp3_title': "{num} : {name} entre {pa} et {pb}".format(
            num=pwsp3['indicator'].number,
            name=pwsp3['indicator'].name,
            pa=perioda.strid, pb=periodb.strid),
        'sp3_fname': "palu-evol-sp3-_{pa}_{pb}".format(
            pa=perioda.strid, pb=periodb.strid),
        'sp3_categories': [p[1].name for p in pwsp3['periods']],
        'sp3_series': [{'name': pwsp3['indicator'].name,
                        'data': pwsp3['points']}],
    })

    template = kwargs.get('template_name', 'dashboard.html')
    return render(request, template, context)
def view(request, entity_uuid=None, perioda_str=None, periodb_str=None,
         indicator_slug=None, **kwargs):
    """Analysis section 2: per-indicator arrivals over a period interval,
    plus a completeness-evolution chart."""
    context = {'page': 'analysis_section2'}

    # handling entity
    context.update(process_entity_filter(request, entity_uuid))

    # handling periods: swap so perioda <= periodb
    context.update(process_period_filter(request, perioda_str, 'perioda'))
    context.update(process_period_filter(request, periodb_str, 'periodb'))
    if context['perioda'] > context['periodb']:
        context['perioda'], context['periodb'] = \
            context['periodb'], context['perioda']
    periods = MonthPeriod.all_from(context['perioda'], context['periodb'])
    context.update({'selected_periods': periods})

    def cached_data_list(entity, periods, indicator):
        # one cached arrivals entry per period
        return [get_cached_data('section2-arrivals', entity=entity,
                                period=period, indicator=indicator)
                for period in periods]

    context.update({
        'section': text_type(SECTION_ID),
        'section_name': SECTION_NAME,
        'arrivals': OrderedDict(
            [(indicator,
              cached_data_list(context['entity'], periods, indicator))
             for indicator in Indicator.get_all_routine()]),
    })

    # evolution graph
    cp = {
        'periods': [period.to_tuple() for period in periods],
        'points': [get_cached_data('section2-points',
                                   entity=context['entity'],
                                   period=period)
                   for period in periods],
    }
    perioda = periods[0]
    periodb = periods[-1]
    context.update({
        'cp_title': "Évolution de la complétude à {name} entre {pa} et {pb}"
                    .format(name=context['entity'].short_name,
                            pa=perioda.strid, pb=periodb.strid),
        'cp_fname': "completeness-_{pa}_{pb}".format(pa=perioda.strid,
                                                     pb=periodb.strid),
        'cp_categories': [p[1].name for p in cp['periods']],
        'cp_series': [{'name': "Complétude", 'data': cp['points']}],
    })

    # absolute URI for links
    context.update({'baseurl': request.build_absolute_uri(),
                    'lineage': [Entity.PROVINCE]})
    return render(request,
                  kwargs.get('template_name', 'analysis_section2.html'),
                  context)
def view(request, entity_uuid=None, perioda_str=None, periodb_str=None,
         indicator_slug=None, **kwargs):
    """Render the section 2 analysis page (arrivals table and
    completeness-evolution chart) for an entity and period range."""
    context = {'page': 'analysis_section2'}
    context.update(process_entity_filter(request, entity_uuid))

    # resolve both period bounds, then normalize their order
    for label, raw in (('perioda', perioda_str), ('periodb', periodb_str)):
        context.update(process_period_filter(request, raw, label))
    if context['perioda'] > context['periodb']:
        context['perioda'], context['periodb'] = \
            context['periodb'], context['perioda']

    periods = MonthPeriod.all_from(context['perioda'], context['periodb'])
    context.update({'selected_periods': periods})

    entity = context['entity']

    # cached arrivals per routine indicator, one value per period
    arrivals = OrderedDict()
    for indicator in Indicator.get_all_routine():
        arrivals[indicator] = [
            get_cached_data('section2-arrivals', entity=entity,
                            period=period, indicator=indicator)
            for period in periods]

    context.update({'section': text_type(SECTION_ID),
                    'section_name': SECTION_NAME,
                    'arrivals': arrivals})

    # data for the completeness evolution chart
    cp = {'periods': [period.to_tuple() for period in periods],
          'points': [get_cached_data('section2-points', entity=entity,
                                     period=period)
                     for period in periods]}
    pa, pb = periods[0], periods[-1]
    context.update({
        'cp_title': "Évolution de la complétude à {name} entre {pa} et {pb}"
        .format(name=entity.short_name, pa=pa.strid, pb=pb.strid),
        'cp_fname': "completeness-_{pa}_{pb}"
        .format(pa=pa.strid, pb=pb.strid),
        'cp_categories': [p[1].name for p in cp['periods']],
        'cp_series': [{'name': "Complétude", 'data': cp['points']}],
    })

    # absolute URI for links
    context.update({'baseurl': request.build_absolute_uri(),
                    'lineage': [Entity.PROVINCE]})

    return render(request,
                  kwargs.get('template_name', 'analysis_section2.html'),
                  context)
def read_xls(filepath, partner):
    """Parse an uploaded XLSX template into a dict of data points.

    :param filepath: path of the workbook to parse.
    :param partner: submitting Partner; must be allowed to upload.
    :returns: dict keyed by ``data_ident_for(indicator, period, entity)``
        with slug/period/entity/numerator/denominator values, plus an
        ``errors`` list of structured per-row problems.
    :raises UploadPermissionDenied: if partner can't submit data at all.
    :raises IncorrectExcelFile: if the file is not a proper template.
    """
    if not partner.can_upload:
        raise UploadPermissionDenied(
            _("{user} is not allowed to submit data").format(user=partner))

    try:
        wb = load_workbook(filepath, data_only=True)
    except InvalidFileException:
        raise IncorrectExcelFile(_("Not a proper XLSX Template."))

    ws = wb.active
    nb_rows = len(ws.rows)

    def cd(row, column):
        # shortcut to a cell's computed value (was a lambda; E731)
        return ws.cell(row=row, column=column).value

    # data holder: data points keyed by ident, plus an errors list
    data = {
        'errors': [],
    }

    # record now's time to compare with delays
    submitted_on = timezone.now()

    def add_error(row, column=None, indicator=None, error=None, text=None):
        # collect a structured error entry for uploader feedback
        data['errors'].append({
            'row': row,
            'column': column,
            'indicator': Indicator.get_or_none(indicator.slug)
            if indicator else None,
            'slug': error,
            'text': text,
        })

    # retrieve and store default year/month (template formula references)
    default_year_addr = "=$C$4"
    default_month_addr = "=$D$4"
    try:
        default_year = int(float(cd(4, 3)))
    except (TypeError, ValueError):
        default_year = None
    try:
        default_month = int(float(cd(4, 4)))
    except (TypeError, ValueError):
        default_month = None

    for row in range(5, nb_rows + 1):

        # skip rows carrying no data cells at all
        row_has_data = sum([1 for x in ws.rows[row - 1][4:] if x.value])
        if not row_has_data:
            continue

        rdc = Entity.get_root()
        dps = Entity.find_by_stdname(cd(row, 1), parent=rdc)
        if dps is None:
            logger.warning("No DPS for row #{}".format(row))
            continue  # no DPS, no data

        zs = Entity.find_by_stdname(cd(row, 2), parent=dps)
        if zs is None:
            if cd(row, 2).lower().strip() != "-":
                logger.warning("No ZS for row #{}".format(row))
                continue  # no ZS, no data
            else:
                entity = dps
        else:
            entity = zs

        # check upload location authorization
        if partner.upload_location not in entity.get_ancestors():
            add_error(row, error='permission_denied',
                      text=_("You can't submit data for "
                             "that location ({entity})")
                      .format(entity=entity))
            continue

        # retrieve period: template default or per-row override
        year_str = cd(row, 3)
        if year_str == default_year_addr:
            year = default_year
        else:
            try:
                year = int(float(year_str))
            except (TypeError, ValueError):
                year = None
        month_str = cd(row, 4)
        if month_str == default_month_addr:
            month = default_month
        else:
            try:
                month = int(float(month_str))
            except (TypeError, ValueError):
                month = None

        if year is None or month is None:
            logger.warning("No year or month for row #{}".format(row))
            # fix: original used .format(row) (positional) against a
            # named {row} placeholder, raising KeyError at runtime
            add_error(row, error='incorrect_period',
                      text=_("Missing year or month at row {row}")
                      .format(row=row))
            continue

        try:
            period = MonthPeriod.get_or_create(year, month)
        except ValueError as e:
            logger.warning("Unable to retrieve period: {}".format(e))
            add_error(row, error='incorrect_period',
                      text=_("Unable to retrieve period for {y}/{m}")
                      .format(y=year, m=month))
            continue

        for idx, cell in enumerate(ws.rows[2][4:]):
            if idx % 3 != 0:
                continue  # skip empty merged cols

            column = letter_to_column(cell.column)

            try:
                number = cell.value.split('-')[0].strip()
            except AttributeError:
                # the header doesn't respect the format (non-string cell)
                # better fail everything
                raise IncorrectExcelFile(_("Not a proper XLSX Template."))

            num = cd(row, column)
            denom = cd(row, column + 1)

            # skip if missing numerator
            if num is None:
                continue

            indicator = Indicator.get_by_number(number)
            # not an expected number
            if indicator is None:
                logger.warning("No indicator found at col #{}"
                               .format(column))
                add_error(row, column=cell.column,
                          error='incorrect_indicator',
                          text=_("Unable to match an Indicator at col {col}")
                          .format(col=cell.column))
                continue

            # ensure level of data submitted depending on type.
            # NOTE: error slugs 'incorect_level' and
            # 'outside_submussion_delay' are misspelled but preserved,
            # as downstream consumers may match on them.
            if indicator.collection_type == indicator.SURVEY:
                # data must be DPS
                if not entity.is_dps:
                    logger.warning("Data for Survey Indic on non-DPS #{}"
                                   .format(cell.coordinate))
                    add_error(row, column=cell.column,
                              error='incorect_level',
                              text=_("Survey indicator require DPS data"))
                    continue
            elif indicator.collection_type == indicator.ROUTINE:
                # data must be ZS
                if not entity.is_zs:
                    logger.warning("Data for Routine Indic on non-ZS #{}"
                                   .format(cell.coordinate))
                    add_error(row, column=cell.column,
                              error='incorect_level',
                              text=_("Routine indicator require ZS data"))
                    continue

            # check submission period for that Indicator
            if not indicator.can_submit_on(on=submitted_on, period=period):
                logger.warning("{on} is not a valid submission time "
                               "for {ind} {period}"
                               .format(on=submitted_on, ind=indicator,
                                       period=period))
                add_error(row, column=cell.column,
                          indicator=indicator,
                          error='outside_submussion_delay',
                          text=_("{on} is outside submission period "
                                 "for Indicator #{ind} at {period}")
                          .format(on=submitted_on.strftime('%d-%m-%Y'),
                                  ind=indicator.number,
                                  period=period))
                continue

            if not indicator.is_number and denom is None:
                logger.warning("No denominator for indic #{}"
                               .format(number))
                add_error(row, column=cell.column,
                          error='missing_denominator',
                          text=_("Missing denominator on "
                                 "non-number Indicator"))
                continue
            elif indicator.is_number:
                # plain-number indicators use an implicit denominator of 1
                denom = 1

            try:
                num = float(num)
                denom = float(denom)
            except (TypeError, ValueError):
                add_error(row, column=cell.column,
                          error='incorrect_value',
                          indicator=indicator,
                          text=_("Incorrect value for numerator "
                                 "or denominator `{num} / {denom}`")
                          .format(num=num, denom=denom))
                continue

            data.update({data_ident_for(indicator, period, entity): {
                'slug': indicator.slug,
                'period': period,
                'entity': entity,
                'numerator': num,
                'denominator': denom}})

    return data
def handle(self, *args, **options):
    """Fetch DHIS analytics for the requested period(s) and store records
    for the whole entity tree: root > DPS > ZS > Aire de Santé."""

    # make sure we're at project root
    chdir_dmd()

    # options parsing
    self.debug = options.get('debug')
    update = options.get('update')
    period = MonthPeriod.get_or_none(options.get('period'))
    if period is None:
        # fix: log the raw option value (period is None here) and abort;
        # the original fell through and crashed comparing None below
        logger.error("Unable to match an actual period from `{}`"
                     .format(options.get('period')))
        return

    if options.get('previous', False):
        # walk backwards from `period`, up to NB_PREVIOUS_PERIODS entries
        periods = []
        earliest = MonthPeriod.objects.all().first()  # hoisted DB query
        p = period
        while p > earliest:
            periods.append(p)
            if len(periods) >= NB_PREVIOUS_PERIODS:
                break
            p = p.previous()
    else:
        periods = [period]

    upath = '/analytics.json'

    # DHIS element ids for every indicator's numerator and denominator
    indicators = {i.slug: (i.dhis_numerator_id, i.dhis_denominator_id)
                  for i in Indicator.get_all_dhis()}
    dhis_ids = list(set([v[0] for v in indicators.values()] +
                        [v[1] for v in indicators.values()]))

    drc = Entity.get_root()

    params = {
        'dimension': ['dx:{}'.format(";".join(dhis_ids)),
                      'pe:{}'.format(
                          ";".join([pe.dhis_strid for pe in periods]))],
        'filter': 'ou:{}'.format(drc.dhis_id),
        'displayProperty': 'NAME',
        'outputIdScheme': 'ID',
        'skipRounding': True,
    }

    logger.info(drc)
    if update or self.no_record_at(entity=drc, period=period):
        self.handle_record(get_dhis(path=upath, params=params),
                           entity=drc, periods=periods)

    for dps in drc.get_children():
        logger.info(dps)
        # skip entities that already have a record, unless updating
        if not update and not self.no_record_at(entity=dps, period=period):
            continue
        dparams = copy.copy(params)
        dparams.update({'filter': 'ou:{}'.format(dps.dhis_id)})
        self.handle_record(get_dhis(path=upath, params=dparams),
                           entity=dps, periods=periods)

        # don't look for ZS if no data at DPS
        if self.no_record_at(entity=dps, period=period):
            continue

        for zs in dps.get_children():
            logger.info(zs)
            if not update and not self.no_record_at(entity=zs,
                                                    period=period):
                continue
            zparams = copy.copy(params)
            zparams.update({'filter': 'ou:{}'.format(zs.dhis_id)})
            self.handle_record(get_dhis(path=upath, params=zparams),
                               entity=zs, periods=periods)

            # don't look for Aires if no data at ZS
            if self.no_record_at(entity=zs, period=period):
                continue

            for aire in zs.get_children():
                logger.info(aire)
                if not update and not self.no_record_at(entity=aire,
                                                        period=period):
                    continue
                aparams = copy.copy(params)
                aparams.update({'filter': 'ou:{}'.format(aire.dhis_id)})
                self.handle_record(get_dhis(path=upath, params=aparams),
                                   entity=aire, periods=periods)
def batch_create(cls, data, partner, source=UPLOAD,
                 arrival_status=None, auto_validate=False):
    """Create or update DataRecords in bulk from a parsed upload dict.

    :param data: dict from ``read_xls`` — data points keyed by ident,
        plus an ``errors`` entry which is skipped.
    :param partner: Partner responsible for the submission.
    :param source: origin of the data (UPLOAD or DHIS).
    :param arrival_status: forced arrival status; computed per record
        when None.
    :param auto_validate: validate newly created records immediately.
    :returns: the same ``data`` dict, with each processed entry annotated
        with ``action`` ('created'/'updated'), ``id`` and ``previous``.
    """
    now = timezone.now()

    # make sure we can rollback if something goes wrong
    with transaction.atomic():
        for ident, dp in data.items():
            # skip the errors list; everything else is a data point
            if ident == 'errors':
                continue

            slug = dp['slug']
            period = dp['period']
            entity = dp['entity']

            indic = Indicator.get_or_none(slug)
            dr = cls.get_or_none(indicator=indic,
                                 period=period,
                                 entity=entity)
            num = dp['numerator']
            denum = dp['denominator']

            if dr and (dr.numerator != num or dr.denominator != denum):
                # do not manualy update DHIS data
                if dr.source == dr.DHIS and source == dr.UPLOAD:
                    continue
                old_values = {'numerator': dr.numerator,
                              'denominator': dr.denominator}
                action = 'updated'
                dr.numerator = num
                dr.denominator = denum
                dr.record_update(partner)
            elif dr and dr.source == dr.UPLOAD and source == dr.DHIS:
                # mark data as updated by DHIS even though it's identical
                # fix: bind action/old_values, which were left unbound in
                # this branch and raised NameError at the update() below
                old_values = None
                action = 'updated'
                dr.record_update(partner)
            elif dr is None:
                old_values = None
                action = 'created'
                if arrival_status is None:
                    # NOTE(review): arrival_status is computed here but
                    # never passed to create(); overwriting the parameter
                    # also makes later iterations reuse the first value —
                    # confirm intended usage.
                    arrival_status = indic.arrival_status_on(
                        on=now, period=period)
                dr = cls.objects.create(
                    indicator=indic,
                    period=period,
                    entity=entity,
                    numerator=num,
                    denominator=denum,
                    source=source,
                    created_by=partner)
                if auto_validate:
                    dr.auto_validate(on=now)
            else:
                # new data are identical to datarecord
                continue

            data[ident].update({
                'action': action,
                'id': dr.id,
                'previous': old_values})
    return data