def import_stock_report_dump(sourcefile):
    """Load stock reports from a CSV dump and persist them.

    Rows whose 'Site ID' does not resolve to a known Location are
    printed and skipped.  Each 'Items' cell holds ';'-separated chunks
    of "<item code> <received> <holding>".
    """
    with open(sourcefile) as f:
        for row in csv.DictReader(f):
            site = Location.get_by_code(row['Site ID'])
            if not site:
                # Unknown site: show the offending row and move on.
                print(row)
                continue
            reporter = Connection.objects.get(
                identity=row['Mobile']).contact.worker
            created = utc.localize(
                datetime.utcfromtimestamp(int(row['Timestamp'])))
            logs = []
            for chunk in row['Items'].split(';'):
                parts = chunk.split()
                logs.append(InventoryLog.objects.create(
                    item=Item.get_by_code(parts[0]),
                    last_quantity_received=int(parts[1]),
                    current_holding=int(parts[2])))
            report = StockReport.objects.create(
                site=site, reporter=reporter, created=created)
            report.logs.add(*logs)
    print('Done')
def clean_site_id(self):
    """Validate the submitted site ID and resolve it to a Location.

    Falls back to the reporting worker's own site when no ID was
    supplied.  On success, stores the resolved site and the worker in
    ``cleaned_data`` and returns the raw site_id value.
    """
    site_id = self.cleaned_data.get('site_id')
    if not site_id:
        # No explicit ID: default to the sender's own site.
        site = self.connection.contact.worker.site
    else:
        site = Location.get_by_code(site_id)
        if not site:
            raise forms.ValidationError(_(
                'Error in site ID - are you missing one or more zeros? Please enter or correct the site ID and resend.'))
    if not site.is_site:
        raise forms.ValidationError(_(
            'Error in site ID. Please enter or correct the site ID and resend.'))
    # Nigeria-specific Site ID check
    if not site.hcid.isdigit():
        raise forms.ValidationError(_(
            'Error in site ID. Please enter or correct the site ID and resend.'))
    worker = self.connection.contact.worker
    # The worker may only report for sites under their own site.
    if not worker.site.is_ancestor_of(site, include_self=True):
        raise forms.ValidationError(_(
            'Error in site ID. Please enter or correct the site ID and resend.'))
    self.cleaned_data['site'] = site
    self.cleaned_data['reporter'] = worker
    return site_id
def import_personnel_dump(sourcefile):
    """Import personnel records from a CSV dump.

    Ensures each mobile connection has a Contact (creating one when
    missing), then creates a Personnel row per CSV line.  Rows whose
    'Site ID' is unknown are printed and skipped.
    """
    with open(sourcefile) as f:
        reader = csv.DictReader(f)
        for row in reader:
            connection = Connection.objects.get(identity=row['Mobile'])
            if not connection.contact:
                contact = Contact.objects.create(name=row['Name'])
                connection.contact = contact
                connection.save()
            else:
                # BUG FIX: `contact` was previously only bound when a new
                # Contact was created, so rows whose connection already had
                # a contact reused the previous row's contact (or raised
                # NameError on the first such row).
                contact = connection.contact
            site = Location.get_by_code(row['Site ID'])
            if not site:
                print(row)
                continue
            position = Position.get_by_code(row['Position'])
            email = row['Email'] or None
            Personnel.objects.create(
                name=row['Name'], position=position, email=email,
                site=site, contact=contact
            )
    print('Done')
def handle_register(self, message, msg_text):
    """Register a reporter from an SMS: <location> <role> <full name>."""
    connection, unused = getConnectionAndReporter(
        message, self.ALLOWED_ROLE_CODES)
    text = msg_text.strip()
    if not text:
        message.respond(self.HELP_MESSAGES[u'register'])
        return
    try:
        location_code, role_code, full_name = grammar(text).register()
    except parsley.ParseError:
        message.respond(self.ERROR_MESSAGES[u'invalid_message'] %
                        {u'text': message.text})
        return
    location = Location.get_by_code(location_code)
    if location is None:
        message.respond(self.ERROR_MESSAGES[u'invalid_location'] % {
            u'location_code': location_code,
            u'text': message.text
        })
        return
    role = Role.get_by_code(role_code)
    if role is None or role.code.lower() not in self.ALLOWED_ROLE_CODES:
        message.respond(self.ERROR_MESSAGES[u'invalid_role'] % {
            u'role_code': role_code,
            u'text': message.text
        })
        return
    alias, first_name, last_name = Reporter.parse_name(full_name)
    rep = Reporter(location=location, role=role, alias=alias,
                   first_name=first_name, last_name=last_name)
    if Reporter.exists(rep, connection):
        # Same person registering twice: acknowledge, don't duplicate.
        message.respond(self.RESPONSE_MESSAGES[u'already_registered'] % {
            u'name': rep.first_name,
            u'role': rep.role.name,
            u'location': rep.location.name,
            u'location_type': rep.location.type.name
        })
        return
    rep.save()
    connection.reporters.add(rep)
    message.respond(self.RESPONSE_MESSAGES[u'register'] % {
        u'name': rep.first_name,
        u'role': rep.role.code,
        u'location': rep.location.name,
        u'location_type': rep.location.type.name
    })
def handle_report(self, message, msg_text):
    """Record immunization reports: <location> <commodity>=<amount> pairs."""
    connection, reporter = getConnectionAndReporter(
        message, self.ALLOWED_ROLE_CODES)
    text = msg_text.strip()
    if not text:
        message.respond(self.HELP_MESSAGES[u'report'])
        return
    if reporter is None:
        message.respond(self.ERROR_MESSAGES[u'not_registered'])
        return
    try:
        location_code, remainder = text.split(None, 1)
        pairs = grammar(remainder).report_list()
    except (ValueError, parsley.ParseError):
        message.respond(self.ERROR_MESSAGES[u'invalid_message'] %
                        {u'text': message.text})
        return
    location = Location.get_by_code(location_code)
    if location is None:
        message.respond(self.ERROR_MESSAGES[u'invalid_location'] % {
            u'location_code': location_code,
            u'text': message.text
        })
        return
    amounts = []
    commodities = []
    for code, amount in pairs:
        # Fuzzy-match the reported code against known commodity codes;
        # silently drop anything below the cutoff.
        match = process.extractOne(code, commodity_codes, score_cutoff=50)
        if match is None:
            continue
        comm = match[0]
        amounts.append(amount)
        commodities.append(comm.upper())
        Report.objects.create(reporter=reporter, time=now(),
                              connection=connection, location=location,
                              commodity=comm, immunized=amount)
    response_pairs = u', '.join(
        u'{}={}'.format(comm, amt)
        for comm, amt in zip(commodities, amounts))
    message.respond(self.RESPONSE_MESSAGES[u'report'] % {
        u'location': location.name,
        u'location_type': location.type.name,
        u'pairs': response_pairs,
        u'name': reporter.first_name
    })
def clean_hcid(self):
    """Reject an hcid that already resolves to an existing Location."""
    hcid = self.cleaned_data['hcid']
    if Location.get_by_code(hcid):
        raise forms.ValidationError(_('Site ID is already in use'))
    return hcid
def clean_site_id(self):
    """Resolve site_id to a Location, storing it in cleaned_data."""
    site_id = self.cleaned_data['site_id']
    site = Location.get_by_code(site_id)
    if not site:
        raise forms.ValidationError(_(
            'The site ID you entered does not exist - are you missing one or more zeros? Please try again.'))
    self.cleaned_data['site'] = site
    return site_id
def handle_shortage(self, message, msg_text):
    """Record commodity shortage reports: <location> <commodity code(s)>."""
    connection, reporter = getConnectionAndReporter(
        message, self.ALLOWED_ROLE_CODES)
    text = msg_text.strip()
    if text == u'':
        message.respond(self.HELP_MESSAGES[u'shortage'])
        return
    if reporter is None:
        message.respond(self.ERROR_MESSAGES[u'not_registered'])
        return
    try:
        location_code, codes = text.split(None, 1)
        codes = grammar(codes).shortage_list()
    except (ValueError, parsley.ParseError):
        message.respond(self.ERROR_MESSAGES[u'invalid_message'] %
                        {u'text': message.text})
        return
    location = Location.get_by_code(location_code)
    if location is None:
        message.respond(self.ERROR_MESSAGES[u'invalid_location'] % {
            u'location_code': location_code,
            u'text': message.text
        })
        return
    # BUG FIX: each reported code was fuzzy-matched against the reported
    # codes themselves (a guaranteed self-match), not against the known
    # commodity codes as handle_report does, so invalid codes were
    # always accepted verbatim.
    results = [
        process.extractOne(c, commodity_codes, score_cutoff=50)
        for c in codes
    ]
    commodity = None
    for result in results:
        if result is None:
            continue
        comm = result[0]
        if commodity is None:
            commodity = comm
        Shortage.objects.create(time=now(), commodity=comm,
                                reporter=reporter, location=location,
                                connection=connection)
    if commodity is None:
        # Nothing matched a known commodity; without this guard
        # `commodity.upper()` below would raise AttributeError.
        message.respond(self.ERROR_MESSAGES[u'invalid_message'] %
                        {u'text': message.text})
        return
    message.respond(self.RESPONSE_MESSAGES[u'shortage'] % {
        u'location': location.name,
        u'location_type': location.type.name,
        u'commodity': commodity.upper()
    })
def facilities(request):
    """Stream the facility listing (scoped by ?ancestor=<pk>) as CSV."""
    nigeria = Location.get_by_code('ng')
    # Default to the national location when no ancestor is requested.
    ancestor_pk = request.GET.get('ancestor', nigeria.pk)
    frame = pd.read_sql_query(
        raw_queries.FACILITY_QUERY, connection, params=[ancestor_pk])
    response = HttpResponse(content_type='text/csv')
    frame.to_csv(response, encoding='UTF-8', index=False)
    return response
def parse_message(self, text):
    """Split an uppercased message into fields, peeling a trailing site code."""
    tokens = text.upper().split()
    site_code = None
    # A recognized location code at the end of the message is the site ID.
    if Location.get_by_code(tokens[-1]):
        site_code = tokens.pop()
    parsed = dict(izip_longest(self.fields, tokens))
    if site_code:
        parsed['site_id'] = site_code
    return parsed
def handle_nc(self, message, msg_text):
    """Record a non-compliance report: <location> <reason code> <cases>."""
    connection, reporter = getConnectionAndReporter(
        message, self.ALLOWED_ROLE_CODES)
    text = msg_text.strip()
    if text == u'':
        message.respond(self.HELP_MESSAGES[u'nc'])
        return
    if reporter is None:
        message.respond(self.ERROR_MESSAGES[u'not_registered'])
        return
    try:
        location_code, reason_code, cases = grammar(text).noncompliance()
    except parsley.ParseError:
        message.respond(self.ERROR_MESSAGES[u'invalid_message'] %
                        {u'text': message.text})
        return
    location = Location.get_by_code(location_code)
    if location is None:
        message.respond(self.ERROR_MESSAGES[u'invalid_location'] % {
            u'location_code': location_code,
            u'text': message.text
        })
        return
    if reason_code not in reason_codes:
        message.respond(self.ERROR_MESSAGES[u'invalid_reason'] % {
            u'reason_code': reason_code,
            u'text': message.text
        })
        # BUG FIX: the missing `return` here meant the sender was told the
        # reason was invalid but the NonCompliance record was created anyway.
        return
    report = NonCompliance.objects.create(reporter=reporter,
                                          location=location,
                                          reason=reason_code, cases=cases,
                                          time=now(),
                                          connection=connection)
    message.respond(self.RESPONSE_MESSAGES[u'nc'] % {
        u'location': location.name,
        u'reason': report.get_reason_display(),
        u'cases': cases,
        u'location_type': location.type.name
    })
def parse_message(self, text):
    """Parse item codes and an optional trailing site code from a message."""
    parsed = {'item_codes': set()}
    tokens = text.upper().split()
    if not tokens:
        return parsed
    site = Location.get_by_code(tokens[-1])
    if site:
        # The last token is a recognized site code, not an item code.
        tokens.pop()
        parsed['site'] = site
    parsed['item_codes'] = set(tokens)
    return parsed
def get_dataframe_lite(level, year, month):
    """Build the per-location reporting dataframe for the given month.

    Joins current reporting, the prior-period U1 figures and the
    population estimates on ``loc_id``, then derives the ``u1_perf``
    and ``u5_perf`` ratios.  Returns an empty DataFrame when no query
    pair is defined for ``level``.
    """
    start, end, u1_start, u1_end = get_boundary_dates(year, month)
    query_reporting = getattr(
        queries, level.upper() + '_REPORTING_LITE_QUERY', None)
    query_prior = getattr(queries, level.upper() + '_PREV_U1_QUERY', None)
    if query_reporting is None or query_prior is None:
        return pd.DataFrame()
    reporting_df = pd.read_sql_query(
        query_reporting, connection, params=[start, end]).fillna(0)
    prior_df = pd.read_sql_query(
        query_prior, connection, params=[u1_start, u1_end]).fillna(0)
    estimate_df = get_estimate_dataframe(year, month)
    if level == 'country':
        # Attach the national location id/name to both frames so the
        # loc_id join below works at country level too.
        nigeria = Location.get_by_code('ng')
        for frame in (reporting_df, prior_df):
            frame['loc_id'] = nigeria.id
            frame['loc'] = nigeria.name
    dataframe = pd.concat([
        reporting_df.set_index('loc_id'),
        prior_df.drop('loc', axis=1).set_index('loc_id'),
        estimate_df
    ], axis=1, join='inner')
    dataframe['u1_perf'] = dataframe['u1'] / dataframe['u1_estimate']
    dataframe['u5_perf'] = (
        dataframe['u5'] + dataframe['prev_u1']) / dataframe['u5_estimate']
    return dataframe
def import_program_report_dump(sourcefile):
    """Import program reports from a CSV dump, skipping unknown sites."""
    with open(sourcefile) as f:
        for row in csv.DictReader(f):
            site = Location.get_by_code(row['Site ID'])
            if not site:
                # Unknown site: show the offending row and move on.
                print(row)
                continue
            reporter = Connection.objects.get(
                identity=row['Mobile']).contact.worker
            created = utc.localize(
                datetime.utcfromtimestamp(int(row['Timestamp'])))
            ProgramReport.objects.create(
                site=site,
                reporter=reporter,
                created=created,
                modified=created,
                group=PatientGroup.get_by_code(row['Group']),
                program=Program.get_by_code(row['Program']),
                period_number=row['Period number'],
                period_code=row['Period code'],
                new_marasmic_patients=int(row['Atot']),
                patients_transferred_in=int(row['Tin']),
                patients_transferred_out=int(row['Tout']),
                patient_deaths=int(row['Dead']),
                unconfirmed_patient_defaults=int(row['DefT']),
                patients_cured=int(row['Dcur']),
                unresponsive_patients=int(row['Dmed']))
    print('Done')
def import_stockout_dump(sourcefile):
    """Import stock-out reports from a CSV dump.

    Rows whose 'Site ID' does not resolve to a known Location are
    printed and skipped, matching the behavior of the other import
    helpers (previously a report was created with site=None).
    """
    with open(sourcefile) as f:
        reader = csv.DictReader(f)
        for row in reader:
            site = Location.get_by_code(row['Site ID'])
            if not site:
                # Consistency fix: the sibling importers guard against an
                # unresolved site; this one did not.
                print(row)
                continue
            reporter = Connection.objects.get(
                identity=row['Mobile']).contact.worker
            created = utc.localize(
                datetime.utcfromtimestamp(int(row['Timestamp'])))
            stockout = StockOutReport.objects.create(
                site=site, reporter=reporter, created=created,
                modified=created)
            items = [Item.get_by_code(item_code.strip())
                     for item_code in row['Items'].split(',')]
            stockout.items.add(*items)
    print('Done')
def compute_reporting(query_date):
    """Aggregate centre reporting stats for the month containing query_date.

    Returns a dict keyed by state id (plus the national location id),
    each value holding 'name', a per-LGA 'breakdown' and a 'summary'.
    """
    month_start = beginning_of_the_month(query_date)
    month_end = end_of_the_month(query_date)
    nigeria = Location.get_by_code('ng')
    reporting_df = pd.read_sql_query(
        CENTRE_REPORTING_QUERY, db_connection,
        params=[month_start, month_end, nigeria.id])
    report_data = {}
    national_breakdown = []
    grouped = reporting_df.groupby(['state_id', 'state'])
    # Iterate states alphabetically by name (the second key element).
    for state_id, state_name in sorted(grouped.groups.keys(),
                                       key=itemgetter(1)):
        lga_frame = grouped.get_group((state_id, state_name)).drop(
            ['state_id', 'state', 'lga_id'], axis=1
        ).rename(columns={'lga': 'name'})
        summary = lga_frame.sum()
        summary['name'] = state_name
        report_data[state_id] = {
            'name': state_name,
            'breakdown': lga_frame.to_dict(orient='records'),
            'summary': summary,
        }
        national_breakdown.append(summary)
    national_summary = reporting_df.sum().drop(
        ['state', 'state_id', 'lga', 'lga_id'])
    national_summary['name'] = nigeria.name
    report_data[nigeria.id] = {
        'name': nigeria.name,
        'breakdown': national_breakdown,
        'summary': national_summary,
    }
    return report_data
def dashboard(request, state=None, year=None, month=None):
    """Route to the state or country dashboard after validating year/month."""
    if state is None:
        location = Location.get_by_code('ng')
    else:
        # '-' in the slug matches any character in the stored state name.
        location = get_object_or_404(
            Location, name__iregex=state.replace('-', '.'),
            type__name="State")
    # sanity checks
    try:
        year = int(year) if year else make_aware(datetime.now()).year
        month = int(month) if month else None
    except ValueError:
        return HttpResponseNotFound()
    if month and month > 12:
        return HttpResponseNotFound()
    cumulative = 'cumulative' in request.GET
    if state:
        return _state_dashboard(request, location, year, month, cumulative)
    return _country_dashboard(request, location, year, month, cumulative)
def compute_reports(year, month):
    """Build per-LGA, per-state and national immunization performance data.

    Returns a dict keyed by state id (plus the national location id);
    each value holds a 'breakdown' list and a 'summary' dict.
    """
    ng = Location.get_by_code('ng')
    # Reporting window: the whole calendar month.
    start_date = make_aware(datetime(year, month, 1))
    end_date = start_date + relativedelta(months=1, seconds=-1)
    report_params = [start_date, end_date, ng.id]
    # Prior U1 window: the four years up to the reporting month's start,
    # needed below for U5 performance.
    prior_u1_params = [
        start_date + relativedelta(years=-4),
        start_date + relativedelta(seconds=-1),
        ng.id
    ]
    df = pd.read_sql_query(
        DATA_QUERY, db_connection, params=report_params).round()
    prior_u1_df = pd.read_sql_query(
        PRIOR_DATA_QUERY, db_connection, params=prior_u1_params)
    # estimate_df = CensusResult.get_estimate_dataframe(year, month)
    estimate_df = get_estimate_dataframe(year, month)
    # add in population estimates (looked up per LGA id)
    df[['estimate', 'u1_estimate', 'u5_estimate']] = df.apply(
        lambda row: estimate_df.loc[row['lga_id']], axis=1
    )
    # add in prior U1 reporting, required for U5 performance
    # NOTE(review): aligns prior_u1_df to df by positional index —
    # assumes both queries return rows in the same LGA order; confirm.
    df['prior_u1'] = prior_u1_df['u1']
    # compute LGA performance (percentages of the population estimates)
    df['u1_performance'] = df['u1'] / df['u1_estimate'] * 100
    df['u5_performance'] = (
        df['u5'] + df['prior_u1']) / df['u5_estimate'] * 100
    # rounding
    df = df.round({
        'estimate': 1,
        'u1_estimate': 1,
        'u1_performance': 1,
        'u5_estimate': 1,
        'u5_performance': 1,
    })
    report_data = {}
    grouped_df = df.groupby(['state_id', 'state'])
    national_breakdown = []
    # compute state stats (states ordered alphabetically by name)
    for group_key in sorted(grouped_df.groups.keys(), key=itemgetter(1)):
        state_id, state_name = group_key
        state_stats = {'name': state_name}
        state_group = grouped_df.get_group(group_key).drop(
            ['state_id', 'state', 'lga_id'], axis=1
        ).rename(columns={'lga': 'name'})
        # State summary: LGA sums, but estimates come straight from the
        # estimate dataframe rather than summing LGA-level values.
        state_data = state_group.sum()
        state_data['name'] = state_name
        state_data['estimate'] = estimate_df.loc[state_id]['estimate']
        state_data['u1_estimate'] = estimate_df.loc[state_id]['u1_estimate']
        state_data['u5_estimate'] = estimate_df.loc[state_id]['u5_estimate']
        # compute performance
        state_data['u1_performance'] = round(
            state_data['u1'] / state_data['u1_estimate'] * 100, 1)
        state_data['u5_performance'] = round(
            (state_data['u5'] + state_data['prior_u1']) /
            state_data['u5_estimate'] * 100, 1)
        # assemble ('-' stands in for missing values in the output)
        state_stats['breakdown'] = state_group.fillna('-').to_dict(
            orient='records')
        state_stats['summary'] = state_data.fillna('-').to_dict()
        national_breakdown.append(state_stats['summary'])
        report_data[state_id] = state_stats
    # compute national stats
    national_data = df.sum().drop(['state_id', 'state', 'lga_id', 'lga'])
    national_data['name'] = ng.name
    national_data['estimate'] = estimate_df.loc[ng.id]['estimate']
    national_data['u1_estimate'] = estimate_df.loc[ng.id]['u1_estimate']
    national_data['u5_estimate'] = estimate_df.loc[ng.id]['u5_estimate']
    # compute performance
    national_data['u1_performance'] = round(
        national_data['u1'] / national_data['u1_estimate'] * 100, 1)
    national_data['u5_performance'] = round((
        national_data['u5'] + national_data['prior_u1']) /
        national_data['u5_estimate'] * 100, 1)
    report_data[ng.id] = {
        'breakdown': national_breakdown,
        'summary': national_data.fillna('-').to_dict()
    }
    return report_data