def _get_district_location(self, domain, sp):
    """Look up the DISTRICT-type location named `sp` in `domain`.

    Tries a rough prefix-range query first; if that matches more than
    one location, re-queries with the exact name only. Returns the
    first matching Location.
    """
    matches = Location.view(
        'locations/by_name',
        startkey=[domain, "DISTRICT", sp],
        endkey=[domain, "DISTRICT", sp + "z"],
        reduce=False,
        include_docs=True,
    )
    if len(matches) > 1:
        # prefix match was ambiguous -- narrow to the exact name
        matches = Location.view(
            'locations/by_name',
            startkey=[domain, "DISTRICT", sp],
            endkey=[domain, "DISTRICT", sp],
            reduce=False,
            include_docs=True,
        )
    return matches[0]
def _get_district_location(self, domain, sp):
    """Find the DISTRICT location named `sp` in `domain`: prefix-range
    lookup first, falling back to an exact-key lookup when the prefix
    match is ambiguous; returns the first hit.
    """
    locs = Location.view('locations/by_name',
                         startkey=[domain, "DISTRICT", sp],
                         # the "z" suffix turns this into a rough prefix range
                         endkey=[domain, "DISTRICT", sp + "z"],
                         reduce=False,
                         include_docs=True)
    if len(locs) > 1:
        # ambiguous -- re-query with the exact name only
        locs = Location.view('locations/by_name',
                             startkey=[domain, "DISTRICT", sp],
                             endkey=[domain, "DISTRICT", sp],
                             reduce=False,
                             include_docs=True)
    # NOTE(review): raises IndexError when there is no match at all --
    # confirm callers expect that
    return locs[0]
def handle(self, *args, **options):
    """Management-command entry point: backfill `site_code` onto Location
    docs from their supply-point cases, then report any duplicate codes
    within the domain.
    """
    try:
        domain = args[0]
    except IndexError:
        self.stderr.write('domain required\n')
        return

    self.println('Migrating...')
    for loc_id, case in get_supply_points_json_in_domain_by_location(domain):
        loc = Location.get(loc_id)
        old_code = case.get('site_code', '')
        new_code = getattr(loc, 'site_code', '')
        # only copy the case's code over when the location has none yet
        if old_code and not new_code:
            loc.site_code = old_code
            loc.save()
            self.println('migrated %s (%s)' % (loc.name, loc.site_code))

    self.println('Verifying code uniqueness...')
    all_codes = Location.get_db().view('commtrack/locations_by_code',
                                       startkey=[domain],
                                       endkey=[domain, {}])
    # group location ids by lower-cased code so case-insensitive
    # duplicates surface
    locs_by_code = map_reduce(lambda e: [(e['key'][-1].lower(), e['id'])],
                              data=all_codes)
    for code, loc_ids in locs_by_code.iteritems():
        if len(loc_ids) == 1:
            continue
        self.println('duplicate code [%s]' % code)
        locs = Location.view('_all_docs', keys=loc_ids, include_docs=True)
        for loc in locs:
            self.println(' %s [%s]' % (loc.name, loc._id))
def get_data(self):
    """Yield one record per supply point, pivoting the per-product stock
    rows from the parent data source into '<product_id>-<field>' columns.
    """
    data = list(super(StockStatusBySupplyPointDataSource, self).get_data())
    products = dict((r['product_id'], r['product_name']) for r in data)
    # product ids ordered alphabetically by product name
    product_ids = sorted(products.keys(), key=lambda e: products[e])
    # group the raw rows by their supply point's location id
    by_supply_point = map_reduce(lambda e: [(e['location_id'], )],
                                 data=data, include_docs=True)
    # bulk-fetch the location docs, keyed by id
    locs = dict((loc._id, loc) for loc in Location.view(
        '_all_docs', keys=by_supply_point.keys(), include_docs=True))

    for loc_id, subcases in by_supply_point.iteritems():
        loc = locs[loc_id]
        by_product = dict((c['product_id'], c) for c in subcases)
        rec = {
            'name': loc.name,
            'type': loc.location_type,
            'geo': loc._geopoint,
        }
        for prod in product_ids:
            # flatten each product's metrics into prefixed columns;
            # products with no row at this site yield None values
            rec.update(dict(('%s-%s' % (prod, key),
                             by_product.get(prod, {}).get(key))
                            for key in ('current_stock', 'consumption',
                                        'months_remaining', 'category')))
        yield rec
def get_district_by_name(self, name):
    """Return the DISTRICT locations in this domain whose name is
    exactly `name` (full docs, not the reduced count)."""
    exact_key = [self.domain, "DISTRICT", name]
    return Location.view(
        'locations/by_name',
        startkey=exact_key,
        endkey=exact_key,
        reduce=False,
        include_docs=True,
    )
def load_locs_json(domain, selected_loc_id=None):
    """initialize a json location tree for drill-down controls on the
    client. tree is only partially initialized and branches will be filled
    in on the client via ajax. what is initialized:

    * all top level locs
    * if a 'selected' loc is provided, that loc and its complete ancestry

    The selected location is fetched scoped to `domain` so that a
    location id from another domain cannot expose that domain's
    hierarchy (same pattern as the other load_locs_json in this file).
    """
    def loc_to_json(loc):
        return {
            'name': loc.name,
            'location_type': loc.location_type,
            'uuid': loc._id,
        }
    loc_json = [loc_to_json(loc) for loc in root_locations(domain)]

    # if a location is selected, we need to pre-populate its location
    # hierarchy so that the data is available client-side to pre-populate
    # the drop-downs
    selected = Location.get_in_domain(domain, selected_loc_id)
    if selected:
        lineage = list(Location.view('_all_docs', keys=selected.path,
                                     include_docs=True))
        parent = {'children': loc_json}
        for loc in lineage:
            # find existing entry in the json tree that corresponds to this loc
            this_loc = [k for k in parent['children'] if k['uuid'] == loc._id][0]
            this_loc['children'] = [loc_to_json(loc) for loc in loc.children]
            parent = this_loc

    return loc_json
def handle(self, *args, **options):
    """Management-command entry point: delete the location with the given
    uuid, all of its descendant locations, and any docs linked to it.
    """
    try:
        loc_uuid = args[0]
    except IndexError:
        self.stderr.write('location uuid required\n')
        return

    try:
        loc = Location.get(loc_uuid)
        if not loc or loc.doc_type != 'Location':
            raise ValueError
    except Exception:
        self.stderr.write('doc [%s] does not appear to be a location\n' % loc_uuid)
        return

    self.db = get_db()

    # descendants of this location ('locations/hierarchy' is keyed by
    # [domain] + path)
    startkey = [loc.domain] + loc.path
    locs = Location.view('locations/hierarchy', startkey=startkey,
                         endkey=startkey + [{}], reduce=False,
                         include_docs=True)
    for k in locs:
        if k._id == loc._id:
            # don't delete orig loc until very end, so we can resume task if interrupted
            continue
        self.delete_doc(k, loc)

    # docs linked directly to the location itself
    startkey = [loc.domain, loc._id]
    linked = self.db.view('locations/linked_docs', startkey=startkey,
                          endkey=startkey + [{}], include_docs=True)
    for k in linked:
        self.delete_doc(k['doc'], loc)

    self.delete_doc(loc, loc)
def get_supply_point(domain, site_code=None, loc=None):
    """Resolve a supply point by sms code (or an already-fetched location).

    :param domain: domain to search within
    :param site_code: sms code of the location (used only when `loc` is None)
    :param loc: optional pre-fetched Location, skipping the code lookup
    :return: dict with 'case' (SupplyPointCase or None) and 'location'
        (Location or None)

    Fix: the original computed `case` only when `loc` was truthy and never
    returned anything; this completes it to match the sibling
    implementation that returns the case/location pair.
    """
    if loc is None:
        loc = Location.view('commtrack/locations_by_code',
                            key=[domain, site_code.lower()],
                            include_docs=True).first()
    if loc:
        case = SupplyPointCase.get_by_location(loc)
    else:
        case = None
    return {
        'case': case,
        'location': loc,
    }
def load_locs_json(domain, selected_loc_id=None):
    """initialize a json location tree for drill-down controls on the
    client. tree is only partially initialized and branches will be filled
    in on the client via ajax. what is initialized:

    * all top level locs
    * if a 'selected' loc is provided, that loc and its complete ancestry
    """
    def to_json(location):
        return {"name": location.name,
                "location_type": location.location_type,
                "uuid": location._id}

    tree = [to_json(root) for root in root_locations(domain)]

    # when a location is selected, pre-populate its entire ancestry so the
    # client can fill in the drop-downs without extra requests
    selected = Location.get_in_domain(domain, selected_loc_id)
    if selected:
        ancestry = list(Location.view("_all_docs", keys=selected.path,
                                      include_docs=True))
        node = {"children": tree}
        for ancestor in ancestry:
            # locate this ancestor's entry among the current node's children
            entry = [child for child in node["children"]
                     if child["uuid"] == ancestor._id][0]
            entry["children"] = [to_json(c) for c in ancestor.children]
            node = entry

    return tree
def get_data(self):
    """Yield one record per supply point, pivoting per-product stock rows
    from the parent data source into '<product_id>-<field>' columns.
    """
    data = list(super(StockStatusBySupplyPointDataSource, self).get_data())
    products = dict((r['product_id'], r['product_name']) for r in data)
    # product ids ordered alphabetically by product name
    product_ids = sorted(products.keys(), key=lambda e: products[e])
    # group raw rows by supply-point location id
    by_supply_point = map_reduce(lambda e: [(e['location_id'],)],
                                 data=data, include_docs=True)
    # bulk-fetch the location docs, keyed by id
    locs = dict((loc._id, loc) for loc in Location.view(
        '_all_docs', keys=by_supply_point.keys(), include_docs=True))

    for loc_id, subcases in by_supply_point.iteritems():
        loc = locs[loc_id]
        by_product = dict((c['product_id'], c) for c in subcases)
        rec = {
            'name': loc.name,
            'type': loc.location_type,
            'geo': loc._geopoint,
        }
        for prod in product_ids:
            # flatten each product's metrics into prefixed columns;
            # products missing at this site yield None values
            rec.update(dict(('%s-%s' % (prod, key),
                             by_product.get(prod, {}).get(key))
                            for key in ('current_stock', 'consumption',
                                        'months_remaining', 'category')))
        yield rec
def get_district_by_name(self, name):
    """Return the DISTRICT locations in this domain whose name is exactly
    `name` (full docs, not the reduced count)."""
    locs = Location.view('locations/by_name',
                         startkey=[self.domain, "DISTRICT", name],
                         endkey=[self.domain, "DISTRICT", name],
                         reduce=False,
                         include_docs=True)
    return locs
def _delete_location_id(self, loc_uuid):
    """Delete the location with the given uuid: first its descendant
    locations, then docs linked to it, and finally (when every linked-doc
    delete succeeded and this is not a dry run) the location itself.
    """
    try:
        loc = Location.get(loc_uuid)
        if not loc or loc.doc_type != 'Location':
            raise ValueError
    except Exception:
        self.stderr.write('doc [%s] does not appear to be a location\n' % loc_uuid)
        return

    self.db = get_db()

    # descendants of this location ('locations/hierarchy' is keyed by
    # [domain] + path)
    startkey = [loc.domain] + loc.path
    locs = Location.view('locations/hierarchy', startkey=startkey,
                         endkey=startkey + [{}], reduce=False,
                         include_docs=True)
    for k in locs:
        if k._id == loc._id:
            # don't delete orig loc until very end, so we can resume task if interrupted
            continue
        self.delete_doc(k, loc)

    # docs linked directly to this location
    startkey = [loc.domain, loc._id]
    linked = self.db.view('locations/linked_docs', startkey=startkey,
                          endkey=startkey + [{}], include_docs=True)
    success = True
    # NOTE(review): `success` only tracks the linked-doc deletions, not the
    # descendant deletions above -- confirm that is intentional
    for k in linked:
        success = success and self.delete_doc(k['doc'], loc)

    if success:
        self.println('deleted location %s (%s)' % (loc._id, loc.name))
        if not self.dryrun:
            self.db.delete_doc(loc)
    else:
        self.stderr.write('not deleting %s because there were errors' % loc._id)
def get_data(self):
    """Yield one reporting-status record per supply-point site under the
    active location, optionally filtered to a single program.
    """
    startkey = [self.domain, self.active_location._id if self.active_location else None]
    product_cases = SPPCase.view('commtrack/product_cases',
                                 startkey=startkey,
                                 endkey=startkey + [{}],
                                 include_docs=True)
    if self.program_id:
        # keep only cases whose product belongs to the requested program
        product_cases = filter(lambda c: Product.get(c.product).program_id == self.program_id,
                               product_cases)

    def latest_case(cases):
        # getting last report date should probably be moved to a util function in a case wrapper class
        return max(cases, key=lambda c: getattr(c, 'last_reported',
                                                datetime(2000, 1, 1)).date())

    # group cases by site (full location path), then reduce each group to
    # the reporting status of its most recently reported case
    cases_by_site = map_reduce(lambda c: [(tuple(c.location_),)],
                               lambda v: reporting_status(latest_case(v),
                                                          self.start_date,
                                                          self.end_date),
                               data=product_cases,
                               include_docs=True)

    # TODO if aggregating, won't want to fetch all these locs (will only want to fetch aggregation sites)
    locs = dict((loc._id, loc) for loc in Location.view(
        '_all_docs',
        keys=[path[-1] for path in cases_by_site.keys()],
        include_docs=True))

    for path, status in cases_by_site.iteritems():
        loc = locs[path[-1]]
        yield {
            'loc_id': loc._id,
            'loc_path': loc.path,
            'name': loc.name,
            'type': loc.location_type,
            'reporting_status': status,
            'geo': loc._geopoint,
        }
def _data(self):
    """Aggregate per-site reporting statuses up to the direct children of
    the active location; returns (overall status tally, row generator).
    """
    config = {
        'domain': self.domain,
        'location_id': self.request.GET.get('location_id'),
        'startdate': self.datespan.startdate_utc,
        'enddate': self.datespan.enddate_utc,
        'request': self.request,
    }
    statuses = list(ReportingStatusDataSource(config).get_data())

    def child_loc(path):
        # id of the ancestor in `path` that is a direct child of the
        # active location (or of the root when no location is active)
        root = self.active_location
        ix = path.index(root._id) if root else -1
        try:
            return path[ix + 1]
        except IndexError:
            return None

    def case_iter():
        for site in statuses:
            if child_loc(site['loc_path']) is not None:
                yield (site['loc_path'], site['reporting_status'])

    # statuses and site ids grouped by their aggregation (child) location
    status_by_agg_site = map_reduce(lambda (path, status): [(child_loc(path), status)],
                                    data=case_iter())
    sites_by_agg_site = map_reduce(lambda (path, status): [(child_loc(path), path[-1])],
                                   data=case_iter())

    def status_tally(statuses):
        # count and fraction per distinct status value
        total = len(statuses)
        return map_reduce(lambda s: [(s,)],
                          lambda v: {'count': len(v), 'pct': len(v) / float(total)},
                          data=statuses)

    status_counts = dict((loc_id, status_tally(statuses))
                         for loc_id, statuses in status_by_agg_site.iteritems())
    master_tally = status_tally([site['reporting_status'] for site in statuses])

    locs = sorted(Location.view('_all_docs', keys=status_counts.keys(),
                                include_docs=True),
                  key=lambda loc: loc.name)

    def fmt(pct):
        return '%.1f%%' % (100. * pct)

    def fmt_pct_col(loc, col_type):
        return fmt(status_counts[loc._id].get(col_type, {'pct': 0.})['pct'])

    def fmt_count_col(loc, col_type):
        return status_counts[loc._id].get(col_type, {'count': 0})['count']

    def _rows():
        # one row per child location: name, site count, then count+pct
        # for each status column
        for loc in locs:
            row = [loc.name, len(sites_by_agg_site[loc._id])]
            for k in ('reporting', 'nonreporting'):
                row.append(fmt_count_col(loc, k))
                row.append(fmt_pct_col(loc, k))
            yield row

    return master_tally, _rows()
def _data(self):
    """Roll per-site reporting statuses up to the direct children of the
    active location; returns (overall status tally, row generator).
    """
    config = {
        'domain': self.domain,
        'location_id': self.request.GET.get('location_id'),
        'startdate': self.datespan.startdate_utc,
        'enddate': self.datespan.enddate_utc,
        'request': self.request,
    }
    statuses = list(ReportingStatusDataSource(config).get_data())

    def child_loc(path):
        # the ancestor in `path` that is a direct child of the active
        # location (or of the root when none is active)
        root = self.active_location
        ix = path.index(root._id) if root else -1
        try:
            return path[ix + 1]
        except IndexError:
            return None

    def case_iter():
        for site in statuses:
            if child_loc(site['loc_path']) is not None:
                yield (site['loc_path'], site['reporting_status'])

    # group statuses and site ids by aggregation (child) location
    status_by_agg_site = map_reduce(lambda (path, status): [(child_loc(path), status)],
                                    data=case_iter())
    sites_by_agg_site = map_reduce(lambda (path, status): [(child_loc(path), path[-1])],
                                   data=case_iter())

    def status_tally(statuses):
        # count and fraction for each distinct status value
        total = len(statuses)
        return map_reduce(lambda s: [(s,)],
                          lambda v: {'count': len(v), 'pct': len(v) / float(total)},
                          data=statuses)

    status_counts = dict((loc_id, status_tally(statuses))
                         for loc_id, statuses in status_by_agg_site.iteritems())
    master_tally = status_tally([site['reporting_status'] for site in statuses])

    locs = sorted(Location.view('_all_docs', keys=status_counts.keys(),
                                include_docs=True),
                  key=lambda loc: loc.name)

    def fmt(pct):
        return '%.1f%%' % (100. * pct)

    def fmt_pct_col(loc, col_type):
        return fmt(status_counts[loc._id].get(col_type, {'pct': 0.})['pct'])

    def fmt_count_col(loc, col_type):
        return status_counts[loc._id].get(col_type, {'count': 0})['count']

    def _rows():
        # one row per child location: name, site count, then count and
        # percentage for each status column
        for loc in locs:
            row = [loc.name, len(sites_by_agg_site[loc._id])]
            for k in ('reporting', 'nonreporting'):
                row.append(fmt_count_col(loc, k))
                row.append(fmt_pct_col(loc, k))
            yield row

    return master_tally, _rows()
def get_supply_point(domain, site_code):
    """Look up a location by sms code along with its supply-point case;
    returns {"case": ..., "location": ...} with None entries when not
    found."""
    location = Location.view("commtrack/locations_by_code",
                             key=[domain, site_code.lower()],
                             include_docs=True).first()
    case = None
    if location:
        case = CommCareCase.view("commtrack/supply_point_by_loc",
                                 key=[domain, location._id],
                                 include_docs=True).first()
    return {"case": case, "location": location}
def get_supply_point(domain, site_code):
    """Resolve a supply point by sms code.

    :param domain: domain to search within
    :param site_code: sms code of the location (lower-cased for lookup)
    :return: dict with 'case' (supply point case or None) and 'location'
        (Location or None)

    Fix: the original computed `case` only when a location was found and
    never returned anything; completed to match the sibling
    implementations that return the case/location pair.
    """
    loc = Location.view('commtrack/locations_by_code',
                        key=[domain, site_code.lower()],
                        include_docs=True).first()
    if loc:
        case = CommCareCase.view('commtrack/supply_point_by_loc',
                                 key=[domain, loc._id],
                                 include_docs=True).first()
    else:
        case = None
    return {
        'case': case,
        'location': loc,
    }
def _all_locations(domain):
    """Return every location doc in `domain`, in hierarchy-view order."""
    view_kwargs = {
        'startkey': [domain],
        'endkey': [domain, {}],
        'reduce': False,
        'include_docs': True,
    }
    return Location.view('locations/hierarchy', **view_kwargs).all()
def get_supply_point(domain, site_code=None, loc=None):
    """Resolve a supply point by sms code, or from an already-fetched
    location; returns {"case": ..., "location": ...} with None entries
    when nothing matches."""
    if loc is None:
        loc = Location.view("commtrack/locations_by_code",
                            key=[domain, site_code.lower()],
                            include_docs=True).first()
    case = SupplyPointCase.get_by_location(loc) if loc else None
    return {"case": case, "location": loc}
def locs_by_domain(domain):
    """Return the number of locations in `domain`, as reported by the
    reduced 'locations/by_name' view (0 when the view has no rows)."""
    rows = Location.view(
        'locations/by_name',
        startkey=[domain],
        endkey=[domain, {}],
        reduce=True,
        include_docs=False,
    ).all()
    if not rows:
        return 0
    return rows[0].get('value', 0)
def get_locs(type):
    # TODO use ES instead?
    """Prefix-search locations of the given type whose name starts with
    the query (case-insensitive), capped at LIMIT results.

    Fix: 'locations/by_name' is also queried reduced elsewhere in this
    codebase, so pass reduce=False explicitly to get the row docs rather
    than the reduced count (matches the sibling implementation).
    """
    q = query.lower()
    startkey = [domain, type, q]
    # the 'zzzzzz' suffix approximates a prefix range query
    endkey = [domain, type, q + 'zzzzzz']
    return Location.view('locations/by_name',
                         startkey=startkey,
                         endkey=endkey,
                         limit=LIMIT,
                         reduce=False,
                         include_docs=True)
def link_locations(base_results):
    """annotate case results with info from linked location doc (if any)"""
    def _linked_loc_id(doc):
        # leaf of the case's location_ path, or None when absent/empty
        if getattr(doc, 'location_', None):
            return doc.location_[-1]
        return None

    wanted = set()
    for match in base_results:
        loc_id = _linked_loc_id(match.couch_doc)
        if loc_id is not None:
            wanted.add(loc_id)

    # bulk-fetch the referenced location docs, keyed by id
    locations = dict((l._id, l) for l in
                     Location.view('_all_docs', keys=list(wanted),
                                   include_docs=True))

    for match in base_results:
        loc_id = _linked_loc_id(match.couch_doc)
        if loc_id is not None:
            match.couch_doc.linked_location = locations[loc_id]._doc
def get_locs(type):
    # TODO use ES instead?
    """Prefix-search locations of this type matching the query,
    case-insensitive, up to LIMIT results."""
    needle = query.lower()
    return Location.view(
        'locations/by_name',
        startkey=[domain, type, needle],
        # 'zzzzzz' suffix approximates an open-ended prefix range
        endkey=[domain, type, needle + 'zzzzzz'],
        limit=LIMIT,
        reduce=False,
        include_docs=True,
    )
def location_dump(request, domain):
    """View: download a CSV listing every location's uuid, type, and sms
    code for `domain`.
    """
    loc_ids = [row['id'] for row in
               Location.view('commtrack/locations_by_code',
                             startkey=[domain], endkey=[domain, {}])]

    resp = HttpResponse(content_type='text/csv')
    resp['Content-Disposition'] = 'attachment; filename="locations_%s.csv"' % domain
    w = csv.writer(resp)
    w.writerow(['UUID', 'Location Type', 'SMS Code'])
    # iter_docs bulk-fetches docs to avoid one GET per location
    for raw in iter_docs(Location.get_db(), loc_ids):
        loc = Location.wrap(raw)
        w.writerow([loc._id, loc.location_type, loc.site_code])
    return resp
def link_locations(base_results):
    """annotate case results with info from linked location doc (if any)"""
    def _has_location(doc):
        return hasattr(doc, 'location_') and doc.location_

    # leaf location id (last entry of the case's location_ path) per match
    loc_ids = set(match.couch_doc.location_[-1] for match in base_results
                  if _has_location(match.couch_doc))
    # bulk-fetch the referenced location docs, keyed by id
    locs = dict((loc._id, loc) for loc in
                Location.view('_all_docs', keys=list(loc_ids),
                              include_docs=True))
    for match in base_results:
        if _has_location(match.couch_doc):
            loc_id = match.couch_doc.location_[-1]
            match.couch_doc.linked_location = locs[loc_id]._doc
def location_dump(request, domain):
    """View: stream a CSV of every location's UUID, type, and SMS code
    in `domain` as an attachment."""
    id_rows = Location.view('commtrack/locations_by_code',
                            startkey=[domain], endkey=[domain, {}])
    loc_ids = [r['id'] for r in id_rows]

    resp = HttpResponse(content_type='text/csv')
    resp['Content-Disposition'] = 'attachment; filename="locations_%s.csv"' % domain
    writer = csv.writer(resp)
    writer.writerow(['UUID', 'Location Type', 'SMS Code'])
    # bulk-fetch docs rather than one GET per location
    for doc in iter_docs(Location.get_db(), loc_ids):
        location = Location.wrap(doc)
        writer.writerow([location._id, location.location_type, location.site_code])
    return resp
def sync_ilsgateway_location(domain, endpoint, ilsgateway_location):
    """Create or update the local Location (and its SupplyPointCase)
    mirroring an ILSGateway location, recursively syncing missing
    parents. Returns the synced Location.
    """
    location = Location.view('commtrack/locations_by_code',
                             key=[domain, ilsgateway_location.code.lower()],
                             include_docs=True).first()
    if not location:
        # no local copy yet -- create one, resolving the parent first
        if ilsgateway_location.parent:
            loc_parent = SupplyPointCase.view('hqcase/by_domain_external_id',
                                              key=[domain, str(ilsgateway_location.parent)],
                                              reduce=False,
                                              include_docs=True).first()
            if not loc_parent:
                # parent not synced yet: fetch it from the remote endpoint
                # and sync it recursively
                parent = endpoint.get_location(ilsgateway_location.parent)
                loc_parent = sync_ilsgateway_location(domain, endpoint,
                                                      Loc.from_json(parent))
            else:
                loc_parent = loc_parent.location
            location = Location(parent=loc_parent)
        else:
            location = Location()
            location.lineage = []
        location.domain = domain
        location.name = ilsgateway_location.name
        if ilsgateway_location.groups:
            location.metadata = {'groups': ilsgateway_location.groups}
        if ilsgateway_location.latitude:
            location.latitude = float(ilsgateway_location.latitude)
        if ilsgateway_location.longitude:
            location.longitude = float(ilsgateway_location.longitude)
        location.location_type = ilsgateway_location.type
        location.site_code = ilsgateway_location.code
        location.external_id = str(ilsgateway_location.id)
        location.save()
        if not SupplyPointCase.get_by_location(location):
            SupplyPointCase.create_from_location(domain, location)
    else:
        # local copy exists -- apply any remote changes
        location_dict = {
            'name': ilsgateway_location.name,
            'latitude': float(ilsgateway_location.latitude) if ilsgateway_location.latitude else None,
            'longitude': float(ilsgateway_location.longitude) if ilsgateway_location.longitude else None,
            'type': ilsgateway_location.type,
            'site_code': ilsgateway_location.code.lower(),
            'external_id': str(ilsgateway_location.id),
        }
        case = SupplyPointCase.get_by_location(location)
        if apply_updates(location, location_dict):
            location.save()
            if case:
                case.update_from_location(location)
            else:
                SupplyPointCase.create_from_location(domain, location)
    return location
def sync_ilsgateway_location(domain, endpoint, ilsgateway_location):
    """Create or update the local Location (and its SupplyPointCase)
    mirroring an ILSGateway location, recursively syncing missing
    parents.

    :return: the synced Location.

    Fix: the original never returned `location`, but this function's own
    recursive call assigns its result to `loc_parent`
    (`loc_parent = sync_ilsgateway_location(...)`), so the missing return
    made every recursively-synced parent come back as None and produced
    `Location(parent=None)`. The sibling implementation in this file
    returns the location.
    """
    location = Location.view('commtrack/locations_by_code',
                             key=[domain, ilsgateway_location.code.lower()],
                             include_docs=True).first()
    if not location:
        # no local copy yet -- create one, resolving the parent first
        if ilsgateway_location.parent:
            loc_parent = SupplyPointCase.view('hqcase/by_domain_external_id',
                                              key=[domain, str(ilsgateway_location.parent)],
                                              reduce=False,
                                              include_docs=True).first()
            if not loc_parent:
                # parent not synced yet: fetch it from the remote endpoint
                # and sync it recursively
                parent = endpoint.get_location(ilsgateway_location.parent)
                loc_parent = sync_ilsgateway_location(domain, endpoint,
                                                      Loc.from_json(parent))
            else:
                loc_parent = loc_parent.location
            location = Location(parent=loc_parent)
        else:
            location = Location()
            location.lineage = []
        location.domain = domain
        location.name = ilsgateway_location.name
        if ilsgateway_location.groups:
            location.metadata = {'groups': ilsgateway_location.groups}
        if ilsgateway_location.latitude:
            location.latitude = float(ilsgateway_location.latitude)
        if ilsgateway_location.longitude:
            location.longitude = float(ilsgateway_location.longitude)
        location.location_type = ilsgateway_location.type
        location.site_code = ilsgateway_location.code
        location.external_id = str(ilsgateway_location.id)
        location.save()
        if not SupplyPointCase.get_by_location(location):
            SupplyPointCase.create_from_location(domain, location)
    else:
        # local copy exists -- apply any remote changes
        location_dict = {
            'name': ilsgateway_location.name,
            'latitude': float(ilsgateway_location.latitude) if ilsgateway_location.latitude else None,
            'longitude': float(ilsgateway_location.longitude) if ilsgateway_location.longitude else None,
            'type': ilsgateway_location.type,
            'site_code': ilsgateway_location.code.lower(),
            'external_id': str(ilsgateway_location.id),
        }
        case = SupplyPointCase.get_by_location(location)
        if apply_updates(location, location_dict):
            location.save()
            if case:
                case.update_from_location(location)
            else:
                SupplyPointCase.create_from_location(domain, location)
    return location
def get_supply_point(domain, site_code):
    """Look up the location with the given sms code and its supply-point
    case; either entry of the result may be None."""
    matched_loc = Location.view('commtrack/locations_by_code',
                                key=[domain, site_code.lower()],
                                include_docs=True).first()
    matched_case = None
    if matched_loc:
        matched_case = CommCareCase.view('commtrack/supply_point_by_loc',
                                         key=[domain, matched_loc._id],
                                         include_docs=True).first()
    return {
        'case': matched_case,
        'location': matched_loc,
    }
def _data(self):
    """Roll up per-site reporting statuses ('ontime'/'late'/'nonreporting')
    to the direct children of the active location; returns
    (overall status tally, row generator).
    """
    startkey = [self.domain, self.active_location._id if self.active_location else None]
    product_cases = CommCareCase.view('commtrack/product_cases',
                                      startkey=startkey,
                                      endkey=startkey + [{}],
                                      include_docs=True)

    def latest_case(cases):
        # getting last report date should probably be moved to a util function in a case wrapper class
        return max(cases, key=lambda c: getattr(c, 'last_reported',
                                                datetime(2000, 1, 1)).date())

    # status of each site's most recently reported case, keyed by the
    # site's full location path
    cases_by_site = map_reduce(lambda c: [(tuple(c.location_),)],
                               lambda v: reporting_status(latest_case(v)),
                               data=product_cases,
                               include_docs=True)

    def child_loc(path):
        # the ancestor in `path` that is a direct child of the active
        # location (or of the root when none is active)
        root = self.active_location
        ix = path.index(root._id) if root else -1
        try:
            return path[ix + 1]
        except IndexError:
            return None

    def case_iter():
        for k, v in cases_by_site.iteritems():
            if child_loc(k) is not None:
                yield (k, v)

    # statuses and site ids grouped by aggregation (child) location
    status_by_agg_site = map_reduce(lambda (path, status): [(child_loc(path), status)],
                                    data=case_iter())
    sites_by_agg_site = map_reduce(lambda (path, status): [(child_loc(path), path[-1])],
                                   data=case_iter())

    def status_tally(statuses):
        # count and fraction per distinct status value
        total = len(statuses)
        return map_reduce(lambda s: [(s,)],
                          lambda v: {'count': len(v), 'pct': len(v) / float(total)},
                          data=statuses)

    status_counts = dict((loc_id, status_tally(statuses))
                         for loc_id, statuses in status_by_agg_site.iteritems())
    master_tally = status_tally(cases_by_site.values())

    locs = sorted(Location.view('_all_docs', keys=status_counts.keys(),
                                include_docs=True),
                  key=lambda loc: loc.name)

    def fmt(pct):
        return '%.1f%%' % (100. * pct)

    def fmt_col(loc, col_type):
        return fmt(status_counts[loc._id].get(col_type, {'pct': 0.})['pct'])

    def _rows():
        for loc in locs:
            # NOTE(review): num_sites is unused; len(...) is recomputed below
            num_sites = len(sites_by_agg_site[loc._id])
            yield [loc.name, len(sites_by_agg_site[loc._id])] + \
                [fmt_col(loc, k) for k in ('ontime', 'late', 'nonreporting')]

    return master_tally, _rows()
def get_supply_point(domain, site_code=None, loc=None):
    """Resolve a supply point by sms code, or from an already-fetched
    location; returns {'case': ..., 'location': ...} with None entries
    when nothing matches."""
    if loc is None:
        loc = Location.view('commtrack/locations_by_code',
                            key=[domain, site_code.lower()],
                            include_docs=True).first()
    # todo: should probably return a SupplyPointCase
    # by calling SupplyPointCase.get_by_location
    case = None
    if loc:
        case = CommCareCase.view('commtrack/supply_point_by_loc',
                                 key=[domain, loc._id],
                                 include_docs=True).first()
    return {
        'case': case,
        'location': loc,
    }
def handle(self, *args, **options):
    """Management-command entry point: copy legacy `site_code` values from
    supply-point cases onto their Location docs, then report duplicate
    codes within the domain.
    """
    try:
        domain = args[0]
    except IndexError:
        self.stderr.write('domain required\n')
        return

    self.println('Migrating...')
    supply_point_cases = CommCareCase.get_db().view(
        'commtrack/supply_point_by_loc',
        startkey=[domain],
        endkey=[domain, {}],
        include_docs=True
    )
    for result in supply_point_cases:
        # the view key ends with the location id
        loc_id = result['key'][-1]
        loc = Location.get(loc_id)
        case = result['doc']
        old_code = case.get('site_code', '')
        new_code = getattr(loc, 'site_code', '')
        # only migrate when the location has no code of its own yet
        if old_code and not new_code:
            loc.site_code = old_code
            loc.save()
            self.println('migrated %s (%s)' % (loc.name, loc.site_code))

    self.println('Verifying code uniqueness...')
    all_codes = Location.get_db().view('commtrack/locations_by_code',
                                       startkey=[domain],
                                       endkey=[domain, {}])
    # group location ids by lower-cased code so case-insensitive
    # duplicates surface
    locs_by_code = map_reduce(lambda e: [(e['key'][-1].lower(), e['id'])],
                              data=all_codes)
    for code, loc_ids in locs_by_code.iteritems():
        if len(loc_ids) == 1:
            continue
        self.println('duplicate code [%s]' % code)
        locs = Location.view('_all_docs', keys=loc_ids, include_docs=True)
        for loc in locs:
            self.println(' %s [%s]' % (loc.name, loc._id))
def _delete_location_id(self, loc_uuid):
    """Delete the location with the given uuid: descendant locations
    first, then linked docs, and finally (when all linked-doc deletes
    succeeded and this is not a dry run) the location itself.
    """
    try:
        loc = Location.get(loc_uuid)
        if not loc or loc.doc_type != 'Location':
            raise ValueError
    except Exception:
        self.stderr.write('doc [%s] does not appear to be a location\n' % loc_uuid)
        return

    self.db = get_db()

    # descendants of this location ('locations/hierarchy' is keyed by
    # [domain] + path)
    startkey = [loc.domain] + loc.path
    locs = Location.view('locations/hierarchy', startkey=startkey,
                         endkey=startkey + [{}], reduce=False,
                         include_docs=True)
    for k in locs:
        if k._id == loc._id:
            # don't delete orig loc until very end, so we can resume task if interrupted
            continue
        self.delete_doc(k, loc)

    # docs linked directly to this location
    startkey = [loc.domain, loc._id]
    linked = self.db.view('locations/linked_docs', startkey=startkey,
                          endkey=startkey + [{}], include_docs=True)
    success = True
    # NOTE(review): `success` only reflects the linked-doc deletions, not
    # the descendant deletions above -- confirm that is intentional
    for k in linked:
        success = success and self.delete_doc(k['doc'], loc)

    if success:
        self.println('deleted location %s (%s)' % (loc._id, loc.name))
        if not self.dryrun:
            self.db.delete_doc(loc)
    else:
        self.stderr.write('not deleting %s because there were errors' % loc._id)
def load_locs(loc_ids):
    """Bulk-fetch the given location docs and return them keyed by _id."""
    fetched = Location.view('_all_docs', keys=list(loc_ids),
                            include_docs=True)
    return dict((location._id, location) for location in fetched)
def _data(self):
    """Aggregate per-site reporting statuses ('ontime'/'late'/
    'nonreporting') up to the direct children of the active location;
    returns (overall status tally, row generator).
    """
    startkey = [
        self.domain,
        self.active_location._id if self.active_location else None
    ]
    product_cases = CommCareCase.view('commtrack/product_cases',
                                      startkey=startkey,
                                      endkey=startkey + [{}],
                                      include_docs=True)

    def latest_case(cases):
        # getting last report date should probably be moved to a util function in a case wrapper class
        return max(cases, key=lambda c: getattr(c, 'last_reported',
                                                datetime(2000, 1, 1)).date())

    # status of each site's most recently reported case, keyed by the
    # site's full location path
    cases_by_site = map_reduce(lambda c: [(tuple(c.location_), )],
                               lambda v: reporting_status(latest_case(v)),
                               data=product_cases,
                               include_docs=True)

    def child_loc(path):
        # the ancestor in `path` that is a direct child of the active
        # location (or of the root when none is active)
        root = self.active_location
        ix = path.index(root._id) if root else -1
        try:
            return path[ix + 1]
        except IndexError:
            return None

    def case_iter():
        for k, v in cases_by_site.iteritems():
            if child_loc(k) is not None:
                yield (k, v)

    # statuses and site ids grouped by aggregation (child) location
    status_by_agg_site = map_reduce(
        lambda (path, status): [(child_loc(path), status)],
        data=case_iter())
    sites_by_agg_site = map_reduce(
        lambda (path, status): [(child_loc(path), path[-1])],
        data=case_iter())

    def status_tally(statuses):
        # count and fraction per distinct status value
        total = len(statuses)
        return map_reduce(lambda s: [(s, )],
                          lambda v: {
                              'count': len(v),
                              'pct': len(v) / float(total)
                          },
                          data=statuses)

    status_counts = dict(
        (loc_id, status_tally(statuses))
        for loc_id, statuses in status_by_agg_site.iteritems())
    master_tally = status_tally(cases_by_site.values())

    locs = sorted(Location.view('_all_docs',
                                keys=status_counts.keys(),
                                include_docs=True),
                  key=lambda loc: loc.name)

    def fmt(pct):
        return '%.1f%%' % (100. * pct)

    def fmt_col(loc, col_type):
        return fmt(status_counts[loc._id].get(col_type, {'pct': 0.})['pct'])

    def _rows():
        for loc in locs:
            # NOTE(review): num_sites is unused; len(...) is recomputed below
            num_sites = len(sites_by_agg_site[loc._id])
            yield [loc.name, len(sites_by_agg_site[loc._id])] + [
                fmt_col(loc, k) for k in ('ontime', 'late', 'nonreporting')
            ]

    return master_tally, _rows()
def load_locs(loc_ids):
    """Bulk-fetch Location docs by id, returned as an {_id: Location}
    dict."""
    return dict((loc._id, loc) for loc in Location.view(
        '_all_docs', keys=list(loc_ids), include_docs=True))