def populate_report_data(start_date, end_date, domain=None):
    """Rebuild warehouse report data for every location in ``domain``.

    Facilities are processed first, then the administrative levels that
    aggregate over them, and finally historical rows are backfilled for
    any newly created facilities.
    """
    # Hard coded to know this is the first date with data.
    start_date = max(start_date, default_start_date())

    # For QA purposes generate reporting data for only some small part of data.
    if TEST:
        locations = _get_test_locations(domain)
        facilities = [loc for loc in locations
                      if loc.location_type == 'FACILITY']
        non_facilities = [loc for loc in locations
                          if loc.location_type in ('DISTRICT', 'REGION', 'MOHSW')]
    else:
        facilities = Location.filter_by_type(domain, 'FACILITY')
        non_facilities = []
        for loc_type in ('DISTRICT', 'REGION', 'MOHSW'):
            non_facilities.extend(Location.filter_by_type(domain, loc_type))

    # First populate all the warehouse tables for all facilities.
    for facility in facilities:
        process_facility_warehouse_data(facility, start_date, end_date)

    # Then populate everything above a facility off the warehouse table.
    for org in non_facilities:
        process_non_facility_warehouse_data(org, start_date, end_date)

    # Finally go back through the history and initialize empty data for
    # any newly created facilities.
    update_historical_data(domain)
def populate_report_data(start_date, end_date, domain, runner, locations=None, strict=True):
    """Rebuild ILSGateway warehouse report data for ``domain``.

    Facility warehouse tables are populated first (synchronous celery
    chains of 5), then every administrative level above them (chains of
    50). ``runner`` records the last location processed so an interrupted
    run can resume where it stopped.
    """
    # first populate all the warehouse tables for all facilities
    # hard coded to know this is the first date with data
    start_date = max(start_date, default_start_date())

    # For QA purposes generate reporting data for only some small part of data.
    if not ILSGatewayConfig.for_domain(domain).all_stock_data:
        if locations is None:
            locations = _get_test_locations(domain)
        facilities = filter(
            lambda location: location.location_type == 'FACILITY',
            locations)
        non_facilities_types = ['DISTRICT', 'REGION', 'MSDZONE', 'MOHSW']
        non_facilities = []
        for location_type in non_facilities_types:
            non_facilities.extend(
                filter(
                    lambda location: location.location_type == location_type,
                    locations))
    else:
        facilities = Location.filter_by_type(domain, 'FACILITY')
        non_facilities = list(Location.filter_by_type(domain, 'DISTRICT'))
        non_facilities += list(Location.filter_by_type(domain, 'REGION'))
        non_facilities += list(Location.filter_by_type(domain, 'MSDZONE'))
        non_facilities += list(Location.filter_by_type(domain, 'MOHSW'))

    if runner.location:
        # Resume support: skip every location before the one recorded on
        # the runner by a previous, interrupted run.
        if runner.location.location_type.name.upper() != 'FACILITY':
            # Facilities were already finished; resume within non-facilities.
            facilities = []
            non_facilities = itertools.dropwhile(
                lambda location: location.location_id != runner.location.location_id,
                non_facilities)
        else:
            facilities = itertools.dropwhile(
                lambda location: location.location_id != runner.location.location_id,
                facilities)

    facilities_chunked_list = chunked(facilities, 5)
    for chunk in facilities_chunked_list:
        # Run the chunk as a synchronous celery chain; .get() blocks so
        # chunks execute strictly in order.
        res = chain(
            process_facility_warehouse_data.si(fac, start_date, end_date, runner)
            for fac in chunk)()
        res.get()

    non_facilities_chunked_list = chunked(non_facilities, 50)
    # then populate everything above a facility off a warehouse table
    for chunk in non_facilities_chunked_list:
        res = chain(
            process_non_facility_warehouse_data.si(
                org, start_date, end_date, runner, strict)
            for org in chunk)()
        res.get()

    # Clear the resume marker after a complete run.
    runner.location = None
    runner.save()
def sync_facilities(request, domain): commtrack_settings = request.project.commtrack_settings # create Facility Registry and Facility LocationTypes if they don't exist if not any(lt.name == 'Facility Registry' for lt in commtrack_settings.location_types): commtrack_settings.location_types.extend([ LocationType(name='Facility Registry', allowed_parents=['']), LocationType(name='Facility', allowed_parents=['Facility Registry']) ]) commtrack_settings.save() registry_locs = dict( (l.external_id, l) for l in Location.filter_by_type(domain, 'Facility Registry')) # sync each registry and add/update Locations for each Facility for registry in FacilityRegistry.by_domain(domain): registry.sync_with_remote() try: registry_loc = registry_locs[registry.url] except KeyError: registry_loc = Location(domain=domain, location_type='Facility Registry', external_id=registry.url) registry_loc.name = registry.name registry_loc.save() registry_loc._seen = True facility_locs = dict( (l.external_id, l) for l in Location.filter_by_type(domain, 'Facility', registry_loc)) for facility in registry.get_facilities(): uuid = facility.data['uuid'] try: facility_loc = facility_locs[uuid] except KeyError: facility_loc = Location(domain=domain, location_type='Facility', external_id=uuid, parent=registry_loc) facility_loc.name = facility.data.get('name', 'Unnamed Facility') facility_loc.save() facility_loc._seen = True for id, f in facility_locs.iteritems(): if not hasattr(f, '_seen'): f.delete() for id, r in registry_locs.iteritems(): if not hasattr(r, '_seen'): r.delete() return HttpResponse('OK')
def test_location_queries(self):
    """Exercise the Location query helpers (descendants, children,
    siblings, parent, root, filter_by_type, get_in_domain, by_site_code,
    by_domain) against a small state/village fixture tree.

    Bug fix: the ``by_site_code`` negative check previously read
    ``self.assertIsNone(None, Location.by_site_code(...))`` — it passed
    ``None`` as the tested value and the actual lookup as the *msg*
    argument, so the assertion could never fail. The lookup result is
    now the asserted value.
    """
    test_state1 = make_loc("teststate1", type="state", parent=self.user.location, domain=self.domain.name)
    test_state2 = make_loc("teststate2", type="state", parent=self.user.location, domain=self.domain.name)
    test_village1 = make_loc("testvillage1", type="village", parent=test_state1, domain=self.domain.name)
    test_village1.site_code = "tv1"
    test_village1.save()
    test_village2 = make_loc("testvillage2", type="village", parent=test_state2, domain=self.domain.name)

    def compare(list1, list2):
        # Compare by document id, ignoring order.
        self.assertEqual(set([l._id for l in list1]), set([l._id for l in list2]))

    # descendants
    compare([test_state1, test_state2, test_village1, test_village2],
            self.user.location.descendants)
    # children
    compare([test_state1, test_state2], self.user.location.children)
    # siblings
    compare([test_state2], test_state1.siblings())
    # parent and parent_id
    self.assertEqual(self.user.location._id, test_state1.parent_id)
    self.assertEqual(self.user.location._id, test_state1.parent._id)
    # is_root
    self.assertTrue(self.user.location.is_root)
    self.assertFalse(test_state1.is_root)
    # Location.root_locations
    compare([self.user.location], Location.root_locations(self.domain.name))
    # Location.filter_by_type
    compare([test_village1, test_village2],
            Location.filter_by_type(self.domain.name, "village"))
    compare([test_village1],
            Location.filter_by_type(self.domain.name, "village", test_state1))

    # Location.get_in_domain — a village moved to another domain must not
    # be visible from this one.
    test_village2.domain = "rejected"
    bootstrap_location_types("rejected")
    test_village2.save()
    self.assertEqual(Location.get_in_domain(self.domain.name, test_village1._id)._id,
                     test_village1._id)
    self.assertIsNone(Location.get_in_domain(self.domain.name, test_village2._id))
    self.assertIsNone(Location.get_in_domain(self.domain.name, "not-a-real-id"))
    self.assertEqual(
        {loc._id for loc in [self.user.location, test_state1, test_state2, test_village1]},
        set(SQLLocation.objects.filter(domain=self.domain.name).location_ids()),
    )

    # Location.by_site_code
    self.assertEqual(test_village1._id,
                     Location.by_site_code(self.domain.name, "tv1")._id)
    # Fixed: assert on the lookup result itself (was assertIsNone(None, ...)).
    self.assertIsNone(Location.by_site_code(self.domain.name, "notreal"))

    # Location.by_domain
    compare([self.user.location, test_state1, test_state2, test_village1],
            Location.by_domain(self.domain.name))
def populate_report_data(start_date, end_date, domain, runner, locations=None, strict=True):
    """Rebuild ILSGateway warehouse report data for ``domain``.

    Facilities first (celery chains of 5), then the administrative
    levels above them (chains of 50), then backfill history for newly
    created facilities. ``runner`` records the last processed location
    so an interrupted run can resume.
    """
    # first populate all the warehouse tables for all facilities
    # hard coded to know this is the first date with data
    start_date = max(start_date, default_start_date())

    # For QA purposes generate reporting data for only some small part of data.
    if not ILSGatewayConfig.for_domain(domain).all_stock_data:
        if locations is None:
            locations = _get_test_locations(domain)
        facilities = filter(lambda location: location.location_type == 'FACILITY', locations)
        non_facilities_types = ['DISTRICT', 'REGION', 'MSDZONE', 'MOHSW']
        non_facilities = []
        for location_type in non_facilities_types:
            non_facilities.extend(filter(lambda location: location.location_type == location_type, locations))
    else:
        facilities = Location.filter_by_type(domain, 'FACILITY')
        non_facilities = list(Location.filter_by_type(domain, 'DISTRICT'))
        non_facilities += list(Location.filter_by_type(domain, 'REGION'))
        non_facilities += list(Location.filter_by_type(domain, 'MSDZONE'))
        non_facilities += list(Location.filter_by_type(domain, 'MOHSW'))

    if runner.location:
        # Resume support: skip everything before the location recorded on
        # the runner by a previous, interrupted run. NOTE: this variant
        # compares couch ``_id`` to the runner's ``location_id``.
        if runner.location.location_type.name.upper() != 'FACILITY':
            facilities = []
            non_facilities = itertools.dropwhile(
                lambda location: location._id != runner.location.location_id,
                non_facilities
            )
        else:
            facilities = itertools.dropwhile(
                lambda location: location._id != runner.location.location_id,
                facilities
            )

    facilities_chunked_list = chunked(facilities, 5)
    for chunk in facilities_chunked_list:
        # Run each chunk as a synchronous celery chain; .get() blocks so
        # chunks execute strictly in order.
        res = chain(process_facility_warehouse_data.si(fac, start_date, end_date, runner) for fac in chunk)()
        res.get()

    non_facilities_chunked_list = chunked(non_facilities, 50)
    # then populate everything above a facility off a warehouse table
    for chunk in non_facilities_chunked_list:
        res = chain(
            process_non_facility_warehouse_data.si(org, start_date, end_date, runner, strict) for org in chunk
        )()
        res.get()

    # Clear the resume marker after a complete run.
    runner.location = None
    runner.save()

    # finally go back through the history and initialize empty data for any
    # newly created facilities
    update_historical_data(domain)
def sync_facilities(request, domain): commtrack_settings = request.project.commtrack_settings # create Facility Registry and Facility LocationTypes if they don't exist if not any(lt.name == 'Facility Registry' for lt in commtrack_settings.location_types): commtrack_settings.location_types.extend([ LocationType(name='Facility Registry', allowed_parents=['']), LocationType(name='Facility', allowed_parents=['Facility Registry']) ]) commtrack_settings.save() registry_locs = dict((l.external_id, l) for l in Location.filter_by_type(domain, 'Facility Registry')) # sync each registry and add/update Locations for each Facility for registry in FacilityRegistry.by_domain(domain): registry.sync_with_remote() try: registry_loc = registry_locs[registry.url] except KeyError: registry_loc = Location( domain=domain, location_type='Facility Registry', external_id=registry.url) registry_loc.name = registry.name registry_loc.save() registry_loc._seen = True facility_locs = dict((l.external_id, l) for l in Location.filter_by_type(domain, 'Facility', registry_loc)) for facility in registry.get_facilities(): uuid = facility.data['uuid'] try: facility_loc = facility_locs[uuid] except KeyError: facility_loc = Location( domain=domain, location_type='Facility', external_id=uuid, parent=registry_loc) facility_loc.name = facility.data.get('name', 'Unnamed Facility') facility_loc.save() facility_loc._seen = True for id, f in facility_locs.iteritems(): if not hasattr(f, '_seen'): f.delete() for id, r in registry_locs.iteritems(): if not hasattr(r, '_seen'): r.delete() return HttpResponse('OK')
def populate_report_data(start_date, end_date, domain, runner, locations=None, strict=True):
    """Rebuild ILSGateway warehouse report data for ``domain``.

    Facility warehouse tables are populated first (celery chains of 5),
    then every administrative level above them (chains of 50). ``runner``
    stores the last processed location so an interrupted run resumes
    where it stopped.
    """
    # first populate all the warehouse tables for all facilities
    # hard coded to know this is the first date with data
    start_date = max(start_date, default_start_date())

    # For QA purposes generate reporting data for only some small part of data.
    if not ILSGatewayConfig.for_domain(domain).all_stock_data:
        if locations is None:
            locations = _get_test_locations(domain)
        facilities = filter(lambda location: location.location_type == "FACILITY", locations)
        non_facilities_types = ["DISTRICT", "REGION", "MSDZONE", "MOHSW"]
        non_facilities = []
        for location_type in non_facilities_types:
            non_facilities.extend(filter(lambda location: location.location_type == location_type, locations))
    else:
        facilities = Location.filter_by_type(domain, "FACILITY")
        non_facilities = list(Location.filter_by_type(domain, "DISTRICT"))
        non_facilities += list(Location.filter_by_type(domain, "REGION"))
        non_facilities += list(Location.filter_by_type(domain, "MSDZONE"))
        non_facilities += list(Location.filter_by_type(domain, "MOHSW"))

    if runner.location:
        # Resume support: skip locations already handled by a prior run.
        if runner.location.location_type.name.upper() != "FACILITY":
            facilities = []
            non_facilities = itertools.dropwhile(
                lambda location: location.location_id != runner.location.location_id,
                non_facilities
            )
        else:
            facilities = itertools.dropwhile(
                lambda location: location.location_id != runner.location.location_id,
                facilities
            )

    facilities_chunked_list = chunked(facilities, 5)
    for chunk in facilities_chunked_list:
        # Execute the chunk as a synchronous celery chain.
        res = chain(process_facility_warehouse_data.si(fac, start_date, end_date, runner) for fac in chunk)()
        res.get()

    non_facilities_chunked_list = chunked(non_facilities, 50)
    # then populate everything above a facility off a warehouse table
    for chunk in non_facilities_chunked_list:
        res = chain(
            process_non_facility_warehouse_data.si(org, start_date, end_date, runner, strict) for org in chunk
        )()
        res.get()

    # Clear the resume marker after a complete run.
    runner.location = None
    runner.save()
def any_bad_outlets(self, domain):
    """Return True if any 'outlet' location in the domain is parented
    directly to a 'block' location (an invalid hierarchy)."""
    return any(
        outlet.parent.location_type == 'block'
        for outlet in Location.filter_by_type(domain.name, 'outlet')
    )
def get_default_column_data(domain, location_types):
    """Build default consumption headers and per-location values.

    Returns {'headers': {loc_type: [col, ...]}, 'values': {loc_id: [...]}}.
    Populated only when the domain uses individual consumption defaults;
    administrative location types get an empty header list.
    """
    data = {'headers': {}, 'values': {}}

    if Domain.get_by_name(domain).commtrack_settings.individual_consumption_defaults:
        products = Product.by_domain(domain, wrap=False)
        for loc_type in location_types:
            type_config = get_loc_config(domain)[loc_type]
            if type_config.administrative:
                # Administrative types carry no consumption columns.
                data['headers'][loc_type] = []
                continue
            data['headers'][loc_type] = ['default_' + p['code_'] for p in products]
            for location in Location.filter_by_type(domain, loc_type):
                data['values'][location._id] = [
                    get_default_consumption(domain, p['_id'], loc_type, location._id) or ''
                    for p in products
                ]
    return data
def get_by_name(self, loc_name, loc_type, parent):
    """Look up a location by name within (loc_type, parent), filling the
    per-type cache on first access and returning None on a miss."""
    cache_key = (loc_type, parent._id if parent else None)
    if cache_key not in self._existing_by_type:
        found = list(Location.filter_by_type(self.domain, loc_type, parent))
        self._existing_by_type[cache_key] = {l.name: l for l in found}
        self._existing_by_id.update({l._id: l for l in found})
    return self._existing_by_type[cache_key].get(loc_name, None)
def dump_locations(response, domain):
    """Write an Excel workbook of all locations in ``domain`` onto
    ``response``, one sheet per location type."""
    buffer = StringIO()
    writer = Excel2007ExportWriter()

    location_types = defined_location_types(domain)
    # Fixed leading columns shared by every sheet.
    common_types = ["id", "name", "parent_id", "latitude", "longitude"]
    writer.open(
        header_table=[
            (loc_type, [common_types + get_custom_property_names(domain, loc_type)])
            for loc_type in location_types
        ],
        file=buffer,
    )

    for loc_type in location_types:
        rows = []
        for loc in Location.filter_by_type(domain, loc_type):
            parent_id = loc.parent._id if loc.parent else ""
            custom_values = [
                loc[prop.name] or ""
                for prop in location_custom_properties(domain, loc.location_type)
            ]
            rows.append(
                [loc._id, loc.name, parent_id, loc.latitude or "", loc.longitude or ""]
                + custom_values
            )
        writer.write([(loc_type, rows)])

    writer.close()
    response.write(buffer.getvalue())
def total(self, form):
    """Yield one data point: the number of 'PPS' locations under the
    region referenced by ``form``, keyed by the form's date.

    Idiom fix: use ``len(...)`` on the materialized list instead of
    calling ``list(...).__len__()`` directly.
    """
    region = Location.get(get_location_id_by_type(form=form, type=u'r\xe9gion'))
    count = len(list(Location.filter_by_type(form.domain, 'PPS', region)))
    yield {
        'date': form_date(form),
        'value': count
    }
def handle(self, *args, **options):
    """Backfill ``location_type`` onto every wholesaler and retailer
    supply point case in the colalifezambia domain."""
    for loc_type in ("wholesaler", "retailer"):
        for loc in Location.filter_by_type("colalifezambia", loc_type):
            case = SupplyPointCase.get_by_location(loc)
            case.location_type = loc_type
            case.save()
def get_default_column_data(domain, location_types):
    """Assemble default *monthly* consumption headers and per-location
    values, keyed by location type and location id respectively.

    Empty unless the domain uses individual consumption defaults;
    administrative location types always get an empty header list.
    """
    data = {'headers': {}, 'values': {}}

    if Domain.get_by_name(domain).commtrack_settings.individual_consumption_defaults:
        products = Product.by_domain(domain)
        for loc_type in location_types:
            type_config = get_loc_config(domain)[loc_type]
            if type_config.administrative:
                data['headers'][loc_type] = []
                continue
            data['headers'][loc_type] = ['default_' + p.code for p in products]
            for location in Location.filter_by_type(domain, loc_type):
                supply_point = SupplyPointCase.get_or_create_by_location(location)
                data['values'][location._id] = [
                    get_default_monthly_consumption(domain, p._id, loc_type, supply_point._id) or ''
                    for p in products
                ]
    return data
def get_default_column_data(domain, location_types):
    """Collect default consumption columns for the location export.

    Returns a dict with 'headers' (loc_type -> column names) and
    'values' (location id -> column values). Both remain empty when the
    domain does not use individual consumption defaults.
    """
    data = {'headers': {}, 'values': {}}

    if Domain.get_by_name(domain).commtrack_settings.individual_consumption_defaults:
        products = Product.by_domain(domain)
        for loc_type in location_types:
            cfg = get_loc_config(domain)[loc_type]
            if cfg.administrative:
                # No consumption columns for administrative types.
                data['headers'][loc_type] = []
                continue
            data['headers'][loc_type] = ['default_' + p.code for p in products]
            for loc in Location.filter_by_type(domain, loc_type):
                sp = SupplyPointCase.get_or_create_by_location(loc)
                data['values'][loc._id] = [
                    get_default_consumption(domain, p._id, loc_type, sp._id) or ''
                    for p in products
                ]
    return data
def get_by_name(self, loc_name, loc_type, parent):
    """Look up a location by name within (loc_type, parent), caching by
    (type, parent id) and returning None on a miss.

    Bug fix: ``Location.filter_by_type`` can return a one-shot iterator;
    it was consumed while building ``_existing_by_type``, so the second
    pass that fills ``_existing_by_id`` saw an exhausted iterator and the
    id cache stayed empty. Materialize the result once with ``list()``
    so both caches are populated (matching the corrected sibling
    implementation elsewhere in this file).
    """
    key = (loc_type, parent._id if parent else None)
    if key not in self._existing_by_type:
        existing = list(Location.filter_by_type(self.domain, loc_type, parent))
        self._existing_by_type[key] = dict((l.name, l) for l in existing)
        self._existing_by_id.update(dict((l._id, l) for l in existing))
    return self._existing_by_type[key].get(loc_name, None)
def handle(self, *args, **options):
    """Write parentage_results.csv listing every commtrack location type
    that allows more than one parent type, with a count of the locations
    of that type in each affected domain."""
    with open('parentage_results.csv', 'wb+') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow([
            'id',
            'name',
            'is_test',
            'location_type',
            'number_of_offending_locations',
        ])
        for domain in Domain.get_all():
            if not domain.commtrack_enabled:
                continue
            for loc_type in domain.commtrack_settings.location_types:
                if len(loc_type.allowed_parents) > 1:
                    count = len(list(Location.filter_by_type(
                        domain.name,
                        loc_type.name,
                    )))
                    writer.writerow([
                        domain._id,
                        domain.name,
                        domain.is_test,
                        loc_type.name,
                        count,
                    ])
def any_bad_outlets(self, domain):
    """Report whether at least one 'outlet' location in the domain sits
    directly under a 'block' (which is not a valid parent)."""
    outlets = Location.filter_by_type(domain.name, 'outlet')
    return any(o.parent.location_type == 'block' for o in outlets)
def dump_locations(response, domain, include_consumption=False):
    """Export all locations in ``domain`` to an Excel workbook written
    onto ``response``, one sheet per location type.

    When ``include_consumption`` is set, per-product default consumption
    columns are appended to each row.
    """
    file = StringIO()
    writer = Excel2007ExportWriter()

    location_types = defined_location_types(domain)

    if include_consumption:
        defaults = get_default_column_data(domain, location_types)
    else:
        defaults = {
            'headers': {},
            'values': {}
        }

    # Fixed leading columns shared by every sheet.
    common_types = ['site_code', 'name', 'parent_site_code', 'latitude', 'longitude']
    writer.open(
        header_table=[
            (loc_type, [
                common_types +
                get_custom_property_names(domain, loc_type, common_types) +
                defaults['headers'].get(loc_type, [])
            ])
            for loc_type in location_types
        ],
        file=file,
    )

    for loc_type in location_types:
        tab_rows = []
        locations = Location.filter_by_type(domain, loc_type)
        for loc in locations:
            parent_site_code = loc.parent.site_code if loc.parent else ''

            custom_prop_values = []
            for prop in location_custom_properties(domain, loc.location_type):
                # Skip custom properties that collide with the fixed columns.
                if prop.name not in common_types:
                    custom_prop_values.append(loc[prop.name] or '')

            if loc._id in defaults['values']:
                default_column_values = defaults['values'][loc._id]
            else:
                default_column_values = []

            tab_rows.append(
                [
                    loc.site_code,
                    loc.name,
                    parent_site_code,
                    loc.latitude or '',
                    loc.longitude or ''
                ] + custom_prop_values + default_column_values
            )
        writer.write([(loc_type, tab_rows)])

    writer.close()
    response.write(file.getvalue())
def dump_locations(response, domain, include_consumption=False):
    """Write an Excel workbook of every location in ``domain`` onto
    ``response``; one sheet per location type, optionally with default
    consumption columns when ``include_consumption`` is True."""
    file = StringIO()
    writer = Excel2007ExportWriter()

    location_types = defined_location_types(domain)

    if include_consumption:
        defaults = get_default_column_data(domain, location_types)
    else:
        defaults = {
            'headers': {},
            'values': {}
        }

    # Columns every sheet starts with, in order.
    common_types = ['site_code', 'name', 'parent_site_code', 'latitude', 'longitude']
    writer.open(
        header_table=[
            (loc_type, [
                common_types +
                get_custom_property_names(domain, loc_type, common_types) +
                defaults['headers'].get(loc_type, [])
            ])
            for loc_type in location_types
        ],
        file=file,
    )

    for loc_type in location_types:
        tab_rows = []
        locations = Location.filter_by_type(domain, loc_type)
        for loc in locations:
            parent_site_code = loc.parent.site_code if loc.parent else ''

            custom_prop_values = []
            for prop in location_custom_properties(domain, loc.location_type):
                # Custom properties sharing a name with a fixed column are
                # already covered above; don't duplicate them.
                if prop.name not in common_types:
                    custom_prop_values.append(loc[prop.name] or '')

            if loc._id in defaults['values']:
                default_column_values = defaults['values'][loc._id]
            else:
                default_column_values = []

            tab_rows.append(
                [
                    loc.site_code,
                    loc.name,
                    parent_site_code,
                    loc.latitude or '',
                    loc.longitude or ''
                ] + custom_prop_values + default_column_values
            )
        writer.write([(loc_type, tab_rows)])

    writer.close()
    response.write(file.getvalue())
def populate_report_data(start_date, end_date, domain=None):
    """Rebuild warehouse report data for ``domain``: facilities first,
    then the administrative levels that aggregate over them, then
    backfill history for newly created facilities."""
    # Hard coded to know this is the first date with data.
    start_date = max(start_date, default_start_date())

    # First populate all the warehouse tables for all facilities.
    for facility in Location.filter_by_type(domain, 'FACILITY'):
        process_facility_warehouse_data(facility, start_date, end_date)

    # Then populate everything above a facility off the warehouse table.
    non_facilities = []
    for loc_type in ('DISTRICT', 'MOHSW', 'REGION'):
        non_facilities.extend(Location.filter_by_type(domain, loc_type))
    for org in non_facilities:
        process_non_facility_warehouse_data(org, start_date, end_date)

    # Finally go back through the history and initialize empty data for
    # any newly created facilities.
    update_historical_data(domain)
def handle(self, *args, **options):
    """Link each retailer supply point case in colalifezambia to the
    supply point case of its parent wholesaler location."""
    retailers = Location.filter_by_type("colalifezambia", "retailer")
    for retailer in retailers:
        retailer_case = SupplyPointCase.get_by_location(retailer)
        wholesaler_case = SupplyPointCase.get_by_location(retailer.parent)
        retailer_case.wholesaler_case_id = wholesaler_case._id
        retailer_case.save()
def handle(self, *args, **options):
    """Stamp every colalifezambia retailer supply point case with the
    case id of its parent wholesaler's supply point."""
    for loc in Location.filter_by_type("colalifezambia", "retailer"):
        case = SupplyPointCase.get_by_location(loc)
        parent_case = SupplyPointCase.get_by_location(loc.parent)
        case.wholesaler_case_id = parent_case._id
        case.save()
def _loc_type_dict(self, loc_type):
    """Build the (sheet_title, {'headers', 'rows'}) pair for one location
    type in the bulk location download.

    Under the new bulk-management UI the sheet uses the loc_type code,
    mapped header names from LOCATION_SHEET_HEADERS, and extra
    external_id / do_delete columns; otherwise the legacy header set is
    used (optionally prefixed with location_id).
    """
    uncategorized_keys = set()
    tab_rows = []
    for loc in Location.filter_by_type(self.domain, loc_type.name):
        # Split the location's metadata into fields the data model knows
        # about and anything it does not recognize.
        model_data, uncategorized_data = \
            self.data_model.get_model_and_uncategorized(loc.metadata)
        uncategorized_keys.update(uncategorized_data.keys())

        loc_dict = {
            'location_id': loc.location_id,
            'site_code': loc.site_code,
            'name': loc.name,
            'parent_site_code': loc.parent.site_code if loc.parent else '',
            'latitude': loc.latitude or '',
            'longitude': loc.longitude or '',
            'data': model_data,
            'uncategorized_data': uncategorized_data,
            'consumption': self.get_consumption(loc),
        }
        if self.new_bulk_management_enabled:
            loc_dict.update({
                LOCATION_SHEET_HEADERS['external_id']: loc.external_id,
                LOCATION_SHEET_HEADERS['do_delete']: ''
            })
        tab_rows.append(dict(flatten_json(loc_dict)))

    tab_headers = ['site_code', 'name', 'parent_site_code', 'latitude', 'longitude']
    if self.include_ids:
        tab_headers = ['location_id'] + tab_headers
    if self.new_bulk_management_enabled:
        # New UI: fixed header set rendered through the header-name map.
        tab_headers = ['location_id', 'site_code', 'name', 'parent_code', 'latitude', 'longitude', 'external_id', 'do_delete']
        tab_headers = [LOCATION_SHEET_HEADERS[h] for h in tab_headers]

    def _extend_headers(prefix, headers):
        # Flatten nested {prefix: {header: None}} dicts into column names.
        tab_headers.extend(json_to_headers(
            {prefix: {header: None for header in headers}}
        ))
    _extend_headers('data', (f.slug for f in self.data_model.fields))
    _extend_headers('uncategorized_data', uncategorized_keys)
    if self.include_consumption_flag and loc_type.name not in self.administrative_types:
        _extend_headers('consumption', self.product_codes)

    if self.new_bulk_management_enabled:
        sheet_title = loc_type.code
    else:
        sheet_title = loc_type.name

    return (sheet_title, {
        'headers': tab_headers,
        'rows': tab_rows,
    })
def rows(self):
    """Build one report row per child of the configured location.

    Each row holds a link to the child's StockOnHandReport plus
    percentage columns for SOH reporting timeliness (on time / late /
    not responding), stockouts in the configured month, and per-product
    'without stock' availability.
    """
    rows = []
    if not self.config['products']:
        # No explicit product filter: use every product in the domain.
        prd_id = SQLProduct.objects.filter(domain=self.config['domain'])\
            .order_by('code').values_list('product_id')
    else:
        prd_id = self.config['products']

    if self.config['location_id']:
        location = Location.get(self.config['location_id'])
        for loc in location.children:
            # Summary row for this sub-location in the reporting window.
            # NOTE(review): indexing [0] assumes a summary exists — would
            # raise IndexError otherwise; confirm upstream guarantees.
            org_summary = OrganizationSummary.objects.filter(
                date__range=(self.config['startdate'], self.config['enddate']),
                supply_point=loc._id)[0]
            soh_data = GroupSummary.objects.get(
                title=SupplyPointStatusTypes.SOH_FACILITY,
                org_summary=org_summary)
            facs = Location.filter_by_type(self.config['domain'], 'FACILITY', loc)
            # Guard against division by zero when there are no facilities.
            facs_count = (float(len(list(facs))) or 1)
            soh_on_time = soh_data.on_time * 100 / facs_count
            soh_late = soh_data.late * 100 / facs_count
            soh_not_responding = soh_data.not_responding * 100 / facs_count
            fac_ids = get_relevant_supply_point_ids(self.config['domain'], loc)
            stockouts = (StockTransaction.objects.filter(
                case_id__in=fac_ids,
                quantity__lte=0,
                report__date__month=int(self.config['month']),
                report__date__year=int(self.config['year'])).count() or 0) / facs_count
            url = make_url(
                StockOnHandReport,
                self.config['domain'],
                '?location_id=%s&month=%s&year=%s',
                (loc._id, self.config['month'], self.config['year']))
            row_data = [
                link_format(loc.name, url),
                format_percent(soh_on_time),
                format_percent(soh_late),
                format_percent(soh_not_responding),
                format_percent(stockouts)
            ]
            for product in prd_id:
                # Per-product "without stock" percentage, if any data exists.
                ps = ProductAvailabilityData.objects.filter(
                    supply_point=loc._id,
                    product=product,
                    date=self.config['startdate'])
                if ps:
                    row_data.append(format_percent(ps[0].without_stock * 100 / float(ps[0].total)))
                else:
                    row_data.append("<span class='no_data'>None</span>")
            rows.append(row_data)
    return rows
def populate_report_data(start_date, end_date, domain, runner, strict=True):
    """Rebuild ILSGateway warehouse report data.

    Facilities are processed first (synchronous celery chains of 5),
    then the administrative levels above them (chains of 50). ``runner``
    records the last location processed so an interrupted run can resume.
    """
    # Facilities created on/after end_date can have no data in the window.
    facilities = SQLLocation.objects.filter(
        location_type__name='FACILITY',
        domain=domain,
        created_at__lt=end_date).order_by('pk').couch_locations()

    non_facilities = list(Location.filter_by_type(domain, 'DISTRICT'))
    non_facilities += list(Location.filter_by_type(domain, 'REGION'))
    non_facilities += list(Location.filter_by_type(domain, 'MSDZONE'))
    non_facilities += list(Location.filter_by_type(domain, 'MOHSW'))

    if runner.location:
        # Resume support: skip everything before the location recorded on
        # the runner by a previous, interrupted run.
        if runner.location.location_type.name.upper() != 'FACILITY':
            # Facilities were already finished; resume within non-facilities.
            facilities = []
            non_facilities = itertools.dropwhile(
                lambda location: location.location_id != runner.location.location_id,
                non_facilities)
        else:
            facilities = itertools.dropwhile(
                lambda location: location.location_id != runner.location.location_id,
                facilities)

    facilities_chunked_list = chunked(facilities, 5)
    for chunk in facilities_chunked_list:
        # Run the chunk as a synchronous celery chain; .get() blocks so
        # chunks execute strictly in order.
        res = chain(
            process_facility_warehouse_data.si(fac, start_date, end_date, runner)
            for fac in chunk)()
        res.get()

    non_facilities_chunked_list = chunked(non_facilities, 50)
    # then populate everything above a facility off a warehouse table
    for chunk in non_facilities_chunked_list:
        res = chain(
            process_non_facility_warehouse_data.si(
                org, start_date, end_date, runner, strict)
            for org in chunk)()
        res.get()

    # Clear the resume marker after a complete run.
    runner.location = None
    runner.save()
def populate_report_data(start_date, end_date, domain, runner, strict=True):
    """Rebuild warehouse report data for ``domain``.

    Facility tables are populated first (synchronous celery chains of 5),
    then everything above a facility (chains of 50). ``runner`` carries a
    resume marker so an interrupted run skips already-processed locations.
    """
    facilities = Location.filter_by_type(domain, 'FACILITY')
    non_facilities = []
    for loc_type in ('DISTRICT', 'REGION', 'MSDZONE', 'MOHSW'):
        non_facilities.extend(Location.filter_by_type(domain, loc_type))

    if runner.location:
        # Resume an interrupted run: drop everything before the location
        # the runner stopped at.
        if runner.location.location_type.name.upper() != 'FACILITY':
            facilities = []
            non_facilities = itertools.dropwhile(
                lambda loc: loc.location_id != runner.location.location_id,
                non_facilities
            )
        else:
            facilities = itertools.dropwhile(
                lambda loc: loc.location_id != runner.location.location_id,
                facilities
            )

    # Facilities first, in small synchronous celery chains.
    for batch in chunked(facilities, 5):
        result = chain(
            process_facility_warehouse_data.si(fac, start_date, end_date, runner)
            for fac in batch
        )()
        result.get()

    # Then populate everything above a facility off the warehouse table.
    for batch in chunked(non_facilities, 50):
        result = chain(
            process_non_facility_warehouse_data.si(org, start_date, end_date, runner, strict)
            for org in batch
        )()
        result.get()

    # A full run completed; clear the resume marker.
    runner.location = None
    runner.save()
def dump_locations(response, domain):
    """Export every location in ``domain`` to an Excel workbook written
    onto ``response``, one sheet per location type, with default
    consumption columns where applicable."""
    file = StringIO()
    writer = Excel2007ExportWriter()

    location_types = defined_location_types(domain)
    defaults = get_default_column_data(domain, location_types)

    # Fixed leading columns shared by every sheet.
    common_types = ['id', 'name', 'parent_id', 'latitude', 'longitude']
    writer.open(
        header_table=[
            (loc_type, [
                common_types +
                get_custom_property_names(domain, loc_type) +
                defaults['headers'].get(loc_type, [])
            ])
            for loc_type in location_types
        ],
        file=file,
    )

    for loc_type in location_types:
        tab_rows = []
        locations = Location.filter_by_type(domain, loc_type)
        for loc in locations:
            parent_id = loc.parent._id if loc.parent else ''
            custom_prop_values = [loc[prop.name] or ''
                                  for prop in location_custom_properties(domain, loc.location_type)]
            if loc._id in defaults['values']:
                default_column_values = defaults['values'][loc._id]
            else:
                default_column_values = []
            tab_rows.append(
                [
                    loc._id,
                    loc.name,
                    parent_id,
                    loc.latitude or '',
                    loc.longitude or ''
                ] + custom_prop_values + default_column_values
            )
        writer.write([(loc_type, tab_rows)])

    writer.close()
    response.write(file.getvalue())
def dump_locations(response, domain):
    """Write an Excel workbook of all locations in ``domain`` onto
    ``response``; one sheet per location type, each row carrying the
    common columns, custom properties, then default consumption values."""
    file = StringIO()
    writer = Excel2007ExportWriter()

    location_types = defined_location_types(domain)
    defaults = get_default_column_data(domain, location_types)

    # Columns every sheet starts with, in order.
    common_types = ['id', 'name', 'parent_id', 'latitude', 'longitude']
    writer.open(
        header_table=[
            (loc_type, [
                common_types +
                get_custom_property_names(domain, loc_type) +
                defaults['headers'].get(loc_type, [])
            ])
            for loc_type in location_types
        ],
        file=file,
    )

    for loc_type in location_types:
        tab_rows = []
        locations = Location.filter_by_type(domain, loc_type)
        for loc in locations:
            parent_id = loc.parent._id if loc.parent else ''
            custom_prop_values = [loc[prop.name] or ''
                                  for prop in location_custom_properties(domain, loc.location_type)]
            if loc._id in defaults['values']:
                default_column_values = defaults['values'][loc._id]
            else:
                default_column_values = []
            tab_rows.append(
                [
                    loc._id,
                    loc.name,
                    parent_id,
                    loc.latitude or '',
                    loc.longitude or ''
                ] + custom_prop_values + default_column_values
            )
        writer.write([(loc_type, tab_rows)])

    writer.close()
    response.write(file.getvalue())
def get_default_column_data(domain, location_types):
    """Build default monthly consumption headers/values per location type.

    Returns {'headers': {loc_type: [col, ...]}, 'values': {loc_id: [...]}}.
    Only populated when the domain uses individual consumption defaults;
    administrative location types get empty header lists.
    """
    data = {
        'headers': {},
        'values': {}
    }

    if Domain.get_by_name(domain).commtrack_settings.individual_consumption_defaults:
        products = Product.by_domain(domain)
        # Map of location id -> supply point case id, fetched in bulk.
        supply_point_map = SupplyPointCase.get_location_map_by_domain(domain)

        consumption_dict = build_consumption_dict(domain)

        if not consumption_dict:
            # No consumption data at all; nothing to add.
            return data

        for loc_type in location_types:
            loc = get_loc_config(domain)[loc_type]
            if not loc.administrative:
                data['headers'][loc_type] = [
                    'default_' + p.code for p in products
                ]

                locations = Location.filter_by_type(domain, loc_type)
                for loc in locations:
                    if loc._id in supply_point_map:
                        sp_id = supply_point_map[loc._id]
                    else:
                        # this only happens if the supply point case did
                        # not already exist
                        sp_id = SupplyPointCase.get_or_create_by_location(loc)._id

                    data['values'][loc._id] = [
                        get_loaded_default_monthly_consumption(
                            consumption_dict,
                            domain,
                            p._id,
                            loc_type,
                            sp_id
                        ) or '' for p in products
                    ]
            else:
                data['headers'][loc_type] = []
    return data
def _loc_type_dict(self, loc_type):
    """Build the (sheet_title, {'headers', 'rows'}) pair for one location
    type in the bulk location download (new bulk-management format:
    mapped header names plus external_id / do_delete columns)."""
    uncategorized_keys = set()
    tab_rows = []
    for loc in Location.filter_by_type(self.domain, loc_type.name):
        # Split the location's metadata into known model fields and
        # anything the data model does not recognize.
        model_data, uncategorized_data = \
            self.data_model.get_model_and_uncategorized(loc.metadata)
        uncategorized_keys.update(uncategorized_data.keys())

        loc_dict = {
            'location_id': loc.location_id,
            'site_code': loc.site_code,
            'name': loc.name,
            'parent_site_code': loc.parent.site_code if loc.parent else '',
            'latitude': loc.latitude or '',
            'longitude': loc.longitude or '',
            'data': model_data,
            'uncategorized_data': uncategorized_data,
            'consumption': self.get_consumption(loc),
            LOCATION_SHEET_HEADERS['external_id']: loc.external_id,
            LOCATION_SHEET_HEADERS['do_delete']: ''
        }
        tab_rows.append(dict(flatten_json(loc_dict)))

    # Fixed header set, rendered through the header-name map.
    header_keys = ['location_id', 'site_code', 'name', 'parent_code',
                   'latitude', 'longitude', 'external_id', 'do_delete']
    tab_headers = [LOCATION_SHEET_HEADERS[h] for h in header_keys]

    def _extend_headers(prefix, headers):
        # Flatten nested {prefix: {header: None}} dicts into column names.
        tab_headers.extend(json_to_headers(
            {prefix: {header: None for header in headers}}
        ))
    _extend_headers('data', (f.slug for f in self.data_model.fields))
    _extend_headers('uncategorized_data', uncategorized_keys)
    if self.include_consumption_flag and loc_type.name not in self.administrative_types:
        _extend_headers('consumption', self.product_codes)

    sheet_title = loc_type.code

    return (sheet_title, {
        'headers': tab_headers,
        'rows': tab_rows,
    })
def get_default_column_data(domain, location_types):
    """Collect default-consumption export columns for a domain.

    Returns {'headers': loc_type -> column names, 'values': location id ->
    default monthly consumption per product ('' when none loaded)}.
    Administrative location types get an empty header list and no values.
    """
    data = {'headers': {}, 'values': {}}

    domain_obj = Domain.get_by_name(domain)
    if domain_obj.commtrack_settings.individual_consumption_defaults:
        products = Product.by_domain(domain)
        supply_point_map = SupplyPointCase.get_location_map_by_domain(domain)

        consumption_dict = build_consumption_dict(domain)
        if not consumption_dict:
            return data

        for loc_type in location_types:
            type_config = get_loc_config(domain)[loc_type]
            if type_config.administrative:
                data['headers'][loc_type] = []
                continue

            data['headers'][loc_type] = [
                'default_' + product.code for product in products
            ]
            for location in Location.filter_by_type(domain, loc_type):
                try:
                    sp_id = supply_point_map[location._id]
                except KeyError:
                    # this only happens if the supply point case did
                    # not already exist
                    sp_id = SupplyPointCase.get_or_create_by_location(location)._id

                defaults = []
                for product in products:
                    loaded = get_loaded_default_monthly_consumption(
                        consumption_dict,
                        domain,
                        product._id,
                        loc_type,
                        sp_id
                    )
                    defaults.append(loaded or '')
                data['values'][location._id] = defaults
    return data
def _loc_type_dict(self, loc_type):
    """Build the (loc_type, sheet_data) pair for one location type's
    export tab: one flattened row per location of that type, plus the
    ordered header list for the tab.
    """
    uncategorized_keys = set()
    tab_rows = []
    for loc in Location.filter_by_type(self.domain, loc_type):
        # Split the location's custom data into fields known to the data
        # model vs. anything unrecognized.
        model_data, uncategorized_data = \
            self.data_model.get_model_and_uncategorized(loc.metadata)
        uncategorized_keys.update(uncategorized_data.keys())

        loc_dict = {
            'location_id': loc.location_id,
            'site_code': loc.site_code,
            'name': loc.name,
            'parent_site_code': loc.parent.site_code if loc.parent else '',
            'latitude': loc.latitude or '',
            'longitude': loc.longitude or '',
            'data': model_data,
            'uncategorized_data': uncategorized_data,
            'consumption': self.get_consumption(loc),
        }
        # flatten nested dicts into dotted column keys for the sheet
        tab_rows.append(dict(flatten_json(loc_dict)))

    tab_headers = [
        'site_code', 'name', 'parent_site_code', 'latitude', 'longitude'
    ]
    if self.include_ids:
        # id column goes first when ids are requested
        tab_headers = ['location_id'] + tab_headers

    def _extend_headers(prefix, headers):
        # append dotted headers like '<prefix>: <header>' for each key
        tab_headers.extend(
            json_to_headers({prefix: {header: None for header in headers}}))
    _extend_headers('data', (f.slug for f in self.data_model.fields))
    _extend_headers('uncategorized_data', uncategorized_keys)
    if self.include_consumption_flag and loc_type not in self.administrative_types:
        _extend_headers('consumption', self.product_codes)

    return (loc_type, {
        'headers': tab_headers,
        'rows': tab_rows,
    })
def handle(self, *args, **options):
    """Write parentage_results.csv: one row per commtrack location type
    that allows more than one parent type, with the count of locations
    of that type in the domain.
    """
    with open('parentage_results.csv', 'wb+') as csvfile:
        csv_writer = csv.writer(
            csvfile,
            delimiter=',',
            quotechar='|',
            quoting=csv.QUOTE_MINIMAL
        )
        csv_writer.writerow([
            'id',
            'name',
            'is_test',
            'location_type',
            'number_of_offending_locations',
        ])
        for d in Domain.get_all():
            if not d.commtrack_enabled:
                continue
            for loc_type in d.commtrack_settings.location_types:
                # only location types with ambiguous parentage are "offending"
                if len(loc_type.allowed_parents) > 1:
                    # count without materializing the full location list
                    count = sum(
                        1 for _ in Location.filter_by_type(
                            d.name,
                            loc_type.name,
                        )
                    )
                    csv_writer.writerow([
                        d._id,
                        d.name,
                        d.is_test,
                        loc_type.name,
                        count
                    ])
def handle(self, *args, **options):
    """Backfill location_type onto the supply point case of every
    wholesaler and retailer location in the colalifezambia domain.
    """
    for loc_type in ("wholesaler", "retailer"):
        for loc in Location.filter_by_type("colalifezambia", loc_type):
            case = SupplyPointCase.get_by_location(loc)
            case.location_type = loc_type
            case.save()
def outlets(self):
    """Terminal-type locations under the active location that pass the
    outlet type filter."""
    candidates = Location.filter_by_type(
        self.domain, self.get_terminal(), self.active_location)
    return [loc for loc in candidates if self.outlet_type_filter(loc)]
def outlets(self):
    """Terminal-type locations under the active location that pass the
    outlet type filter."""
    candidates = Location.filter_by_type(
        self.domain, get_terminal(self.domain), self.active_location)
    return [loc for loc in candidates if self.outlet_type_filter(loc)]
def test_location_queries(self):
    """Exercise Location hierarchy helpers and couch view queries."""
    # two states under the user's location, one village under each state
    test_state1 = make_loc(
        'teststate1',
        type='state',
        parent=self.user.location,
        domain=self.domain.name
    )
    test_state2 = make_loc(
        'teststate2',
        type='state',
        parent=self.user.location,
        domain=self.domain.name
    )
    test_village1 = make_loc(
        'testvillage1',
        type='village',
        parent=test_state1,
        domain=self.domain.name
    )
    test_village1.site_code = 'tv1'
    test_village1.save()
    test_village2 = make_loc(
        'testvillage2',
        type='village',
        parent=test_state2,
        domain=self.domain.name
    )

    def compare(list1, list2):
        # order-insensitive comparison by document id
        self.assertEqual(
            set([l._id for l in list1]),
            set([l._id for l in list2])
        )

    # descendants
    compare(
        [test_state1, test_state2, test_village1, test_village2],
        self.user.location.descendants
    )

    # children
    compare(
        [test_state1, test_state2],
        self.user.location.children
    )

    # siblings
    compare(
        [test_state2],
        test_state1.siblings()
    )

    # parent and parent_id
    self.assertEqual(
        self.user.location._id,
        test_state1.parent_id
    )
    self.assertEqual(
        self.user.location._id,
        test_state1.parent._id
    )

    # is_root
    self.assertTrue(self.user.location.is_root)
    self.assertFalse(test_state1.is_root)

    # Location.root_locations
    compare(
        [self.user.location],
        Location.root_locations(self.domain.name)
    )

    # Location.filter_by_type
    compare(
        [test_village1, test_village2],
        Location.filter_by_type(self.domain.name, 'village')
    )
    compare(
        [test_village1],
        Location.filter_by_type(self.domain.name, 'village', test_state1)
    )

    # Location.get_in_domain — move village2 to another domain so it no
    # longer resolves in this one
    test_village2.domain = 'rejected'
    bootstrap_location_types('rejected')
    test_village2.save()
    self.assertEqual(
        Location.get_in_domain(self.domain.name, test_village1._id)._id,
        test_village1._id
    )
    self.assertIsNone(
        Location.get_in_domain(self.domain.name, test_village2._id),
    )
    self.assertIsNone(
        Location.get_in_domain(self.domain.name, 'not-a-real-id'),
    )

    def _all_locations(domain):
        # raw hierarchy view query for everything in the domain
        return Location.view(
            'locations/hierarchy',
            startkey=[domain],
            endkey=[domain, {}],
            reduce=False,
            include_docs=True
        ).all()
    compare(
        [self.user.location, test_state1, test_state2, test_village1],
        _all_locations(self.domain.name)
    )

    # Location.by_site_code
    self.assertEqual(
        test_village1._id,
        Location.by_site_code(self.domain.name, 'tv1')._id
    )
    # BUG FIX: previously assertIsNone(None, <lookup>) — that asserts the
    # literal None and passes the lookup as the failure message, so the
    # assertion could never fail. Assert on the lookup result instead.
    self.assertIsNone(
        Location.by_site_code(self.domain.name, 'notreal')
    )

    # Location.by_domain
    compare(
        [self.user.location, test_state1, test_state2, test_village1],
        Location.by_domain(self.domain.name)
    )
def test_location_queries(self):
    """Exercise Location hierarchy helpers and SQLLocation queries."""
    # two states under the user's location, one village under each state
    test_state1 = make_loc(
        'teststate1',
        type='state',
        parent=self.user.location,
        domain=self.domain.name
    )
    test_state2 = make_loc(
        'teststate2',
        type='state',
        parent=self.user.location,
        domain=self.domain.name
    )
    test_village1 = make_loc(
        'testvillage1',
        type='village',
        parent=test_state1,
        domain=self.domain.name
    )
    test_village1.site_code = 'tv1'
    test_village1.save()
    test_village2 = make_loc(
        'testvillage2',
        type='village',
        parent=test_state2,
        domain=self.domain.name
    )

    def compare(list1, list2):
        # order-insensitive comparison by document id
        self.assertEqual(
            set([l._id for l in list1]),
            set([l._id for l in list2])
        )

    # descendants
    compare(
        [test_state1, test_state2, test_village1, test_village2],
        self.user.location.descendants
    )

    # children
    compare(
        [test_state1, test_state2],
        self.user.location.get_children()
    )

    # parent and parent_location_id
    self.assertEqual(
        self.user.location.location_id,
        test_state1.parent_location_id
    )
    self.assertEqual(
        self.user.location.location_id,
        test_state1.parent._id
    )

    # Location.root_locations
    compare(
        [self.user.location],
        Location.root_locations(self.domain.name)
    )

    # Location.filter_by_type
    compare(
        [test_village1, test_village2],
        Location.filter_by_type(self.domain.name, 'village')
    )
    compare(
        [test_village1],
        Location.filter_by_type(self.domain.name, 'village', test_state1)
    )

    # move village2 to another domain so it drops out of this domain's
    # SQLLocation results below
    create_domain('rejected')
    bootstrap_location_types('rejected')
    test_village2.domain = 'rejected'
    test_village2.save()
    self.assertEqual(
        {loc.location_id for loc in
         [self.user.location, test_state1, test_state2, test_village1]},
        set(SQLLocation.objects.filter(domain=self.domain.name).location_ids()),
    )

    # Location.by_site_code
    self.assertEqual(
        test_village1._id,
        Location.by_site_code(self.domain.name, 'tv1')._id
    )
    # BUG FIX: previously assertIsNone(None, <lookup>) — that asserts the
    # literal None and passes the lookup as the failure message, so the
    # assertion could never fail. Assert on the lookup result instead.
    self.assertIsNone(
        Location.by_site_code(self.domain.name, 'notreal')
    )

    # Location.by_domain
    compare(
        [self.user.location, test_state1, test_state2, test_village1],
        Location.by_domain(self.domain.name)
    )
def outlets(self):
    """Outlet locations under the active location whose outlet type
    passes the filter."""
    candidates = Location.filter_by_type(
        self.domain, 'outlet', self.active_location)
    return [loc for loc in candidates
            if self.outlet_type_filter(loc.outlet_type)]
def test_location_queries(self):
    """Exercise Location hierarchy helpers, couch queries, and the
    SQLLocation mirror."""
    # two states under the user's location, one village under each state
    test_state1 = make_loc(
        'teststate1',
        type='state',
        parent=self.user.location,
        domain=self.domain.name
    )
    test_state2 = make_loc(
        'teststate2',
        type='state',
        parent=self.user.location,
        domain=self.domain.name
    )
    test_village1 = make_loc(
        'testvillage1',
        type='village',
        parent=test_state1,
        domain=self.domain.name
    )
    test_village1.site_code = 'tv1'
    test_village1.save()
    test_village2 = make_loc(
        'testvillage2',
        type='village',
        parent=test_state2,
        domain=self.domain.name
    )

    def compare(list1, list2):
        # order-insensitive comparison by document id
        self.assertEqual(
            set([l._id for l in list1]),
            set([l._id for l in list2])
        )

    # descendants
    compare(
        [test_state1, test_state2, test_village1, test_village2],
        self.user.location.descendants
    )

    # children
    compare(
        [test_state1, test_state2],
        self.user.location.children
    )

    # siblings
    compare(
        [test_state2],
        test_state1.siblings()
    )

    # parent and parent_id
    self.assertEqual(
        self.user.location._id,
        test_state1.parent_id
    )
    self.assertEqual(
        self.user.location._id,
        test_state1.parent._id
    )

    # is_root
    self.assertTrue(self.user.location.is_root)
    self.assertFalse(test_state1.is_root)

    # Location.root_locations
    compare(
        [self.user.location],
        Location.root_locations(self.domain.name)
    )

    # Location.filter_by_type
    compare(
        [test_village1, test_village2],
        Location.filter_by_type(self.domain.name, 'village')
    )
    compare(
        [test_village1],
        Location.filter_by_type(self.domain.name, 'village', test_state1)
    )

    # Location.get_in_domain — move village2 to another domain so it no
    # longer resolves in this one
    test_village2.domain = 'rejected'
    bootstrap_location_types('rejected')
    test_village2.save()
    self.assertEqual(
        Location.get_in_domain(self.domain.name, test_village1._id)._id,
        test_village1._id
    )
    self.assertIsNone(
        Location.get_in_domain(self.domain.name, test_village2._id),
    )
    self.assertIsNone(
        Location.get_in_domain(self.domain.name, 'not-a-real-id'),
    )

    # SQLLocation mirror should only contain this domain's locations
    self.assertEqual(
        {loc._id for loc in
         [self.user.location, test_state1, test_state2, test_village1]},
        set(SQLLocation.objects.filter(domain=self.domain.name).location_ids()),
    )

    # Location.by_site_code
    self.assertEqual(
        test_village1._id,
        Location.by_site_code(self.domain.name, 'tv1')._id
    )
    # BUG FIX: previously assertIsNone(None, <lookup>) — that asserts the
    # literal None and passes the lookup as the failure message, so the
    # assertion could never fail. Assert on the lookup result instead.
    self.assertIsNone(
        Location.by_site_code(self.domain.name, 'notreal')
    )

    # Location.by_domain
    compare(
        [self.user.location, test_state1, test_state2, test_village1],
        Location.by_domain(self.domain.name)
    )
def test_location_queries(self):
    """Exercise Location hierarchy helpers and couch view queries
    (multi-location user variant)."""
    # two states under the user's first location, one village per state
    test_state1 = make_loc(
        'teststate1',
        type='state',
        parent=self.user.locations[0]
    )
    test_state2 = make_loc(
        'teststate2',
        type='state',
        parent=self.user.locations[0]
    )
    test_village1 = make_loc(
        'testvillage1',
        type='village',
        parent=test_state1
    )
    test_village1.site_code = 'tv1'
    test_village1.save()
    test_village2 = make_loc(
        'testvillage2',
        type='village',
        parent=test_state2
    )

    def compare(list1, list2):
        # order-insensitive comparison by document id
        self.assertEqual(
            set([l._id for l in list1]),
            set([l._id for l in list2])
        )

    # descendants
    compare(
        [test_state1, test_state2, test_village1, test_village2],
        self.user.locations[0].descendants
    )

    # children
    compare(
        [test_state1, test_state2],
        self.user.locations[0].children
    )

    # siblings
    compare(
        [test_state2],
        test_state1.siblings()
    )

    # parent and parent_id
    self.assertEqual(
        self.user.locations[0]._id,
        test_state1.parent_id
    )
    self.assertEqual(
        self.user.locations[0]._id,
        test_state1.parent._id
    )

    # is_root
    self.assertTrue(self.user.locations[0].is_root)
    self.assertFalse(test_state1.is_root)

    # Location.root_locations
    compare(
        [self.user.locations[0]],
        Location.root_locations(self.domain.name)
    )

    # Location.filter_by_type
    compare(
        [test_village1, test_village2],
        Location.filter_by_type(self.domain.name, 'village')
    )
    compare(
        [test_village1],
        Location.filter_by_type(self.domain.name, 'village', test_state1)
    )

    # Location.filter_by_type_count
    self.assertEqual(
        2,
        Location.filter_by_type_count(self.domain.name, 'village')
    )
    self.assertEqual(
        1,
        Location.filter_by_type_count(self.domain.name, 'village', test_state1)
    )

    # Location.get_in_domain — move village2 to another domain so it no
    # longer resolves in this one
    test_village2.domain = 'rejected'
    test_village2.save()
    self.assertEqual(
        Location.get_in_domain(self.domain.name, test_village1._id)._id,
        test_village1._id
    )
    self.assertIsNone(
        Location.get_in_domain(self.domain.name, test_village2._id),
    )
    self.assertIsNone(
        Location.get_in_domain(self.domain.name, 'not-a-real-id'),
    )

    # Location.all_locations
    compare(
        [self.user.locations[0], test_state1, test_state2, test_village1],
        Location.all_locations(self.domain.name)
    )

    # Location.by_site_code
    self.assertEqual(
        test_village1._id,
        Location.by_site_code(self.domain.name, 'tv1')._id
    )
    # BUG FIX: previously assertIsNone(None, <lookup>) — that asserts the
    # literal None and passes the lookup as the failure message, so the
    # assertion could never fail. Assert on the lookup result instead.
    self.assertIsNone(
        Location.by_site_code(self.domain.name, 'notreal')
    )

    # Location.by_domain
    compare(
        [self.user.locations[0], test_state1, test_state2, test_village1],
        Location.by_domain(self.domain.name)
    )