def populate_report_data(start_date, end_date, domain=None):
    """Populate warehouse report tables for every location in the domain.

    Facilities are processed first (their warehouse rows are built from raw
    data), then the org units above them are rolled up off the facility
    rows, and finally historical rows are back-filled for newly created
    facilities.
    """
    # Clamp to the first date we actually have data for.
    start_date = max(start_date, default_start_date())

    if TEST:
        # For QA purposes generate reporting data for only a small slice.
        all_locs = _get_test_locations(domain)
        facilities = [l for l in all_locs if l.location_type == 'FACILITY']
        upper_types = ['DISTRICT', 'REGION', 'MOHSW']
        non_facilities = [l for l in all_locs if l.location_type in upper_types]
    else:
        facilities = Location.filter_by_type(domain, 'FACILITY')
        non_facilities = []
        for loc_type in ('DISTRICT', 'REGION', 'MOHSW'):
            non_facilities.extend(Location.filter_by_type(domain, loc_type))

    for facility in facilities:
        process_facility_warehouse_data(facility, start_date, end_date)

    # Everything above a facility is populated off the warehouse table,
    # so facilities must be fully processed first.
    for org_unit in non_facilities:
        process_non_facility_warehouse_data(org_unit, start_date, end_date)

    # Go back through the history and initialize empty data for any newly
    # created facilities.
    update_historical_data(domain)
def test_full_delete(self):
    """full_delete() on a location should remove it and all its children."""
    child = make_loc('test_loc', type='state', parent=self.user.location)
    child.save()
    count_before = len(list(Location.by_domain(self.domain.name)))

    root = self.user.location
    root.full_delete()

    # Both the parent and its child are gone from the domain...
    count_after = len(list(Location.by_domain(self.domain.name)))
    self.assertEqual(count_after, count_before - 2)
    self.assertEqual(len(Location.root_locations(self.domain.name)), 0)
    # ...and permanently gone from the sql db as well.
    self.assertEqual(len(SQLLocation.objects.all()), 0)
def _update_location_data(self, user):
    """Sync the user's primary and assigned locations from cleaned form data."""
    primary_id = self.cleaned_data['primary_location']
    assigned_ids = self.cleaned_data['assigned_locations']

    if user.is_commcare_user():
        # Mobile workers carry their location directly on the user doc.
        if primary_id != user.location_id:
            if primary_id:
                user.set_location(Location.get(primary_id))
            else:
                user.unset_location()
        if set(assigned_ids) != set(user.assigned_location_ids):
            user.reset_locations(assigned_ids)
    else:
        # Web users store locations per-domain on the domain membership.
        membership = user.get_domain_membership(self.domain)
        if primary_id != membership.location_id:
            if primary_id:
                user.set_location(self.domain, Location.get(primary_id))
            else:
                user.unset_location(self.domain)
        if set(assigned_ids) != set(membership.assigned_location_ids):
            user.reset_locations(self.domain, assigned_ids)
def handle(self, *args, **options):
    """Scan every location and write any hardcoded properties to a csv,
    then print a per-domain count of offenders."""
    with open('location_results.csv', 'wb+') as csvfile:
        writer = csv.writer(
            csvfile,
            delimiter=',',
            quotechar='|',
            quoting=csv.QUOTE_MINIMAL
        )
        writer.writerow(['id', 'type', 'domain', 'property', 'value'])

        # Distinct location ids across every type in the view.
        location_ids = list(set(Location.get_db().view(
            'locations/by_type',
            reduce=False,
            wrapper=lambda row: row['id'],
        ).all()))

        problem_counts = {}
        for doc in iter_docs(Location.get_db(), location_ids):
            location = Location.get(doc['_id'])
            if self.has_any_hardcoded_properties(location, writer):
                problem_counts[location.domain] = problem_counts.get(location.domain, 0) + 1

    self.stdout.write("\nDomain stats:\n")
    for domain, count in problem_counts.iteritems():
        self.stdout.write("%s: %d" % (domain, count))
def test_archiving_location_should_resync(self):
    """
    When locations are archived, we should resync them
    """
    couch_location = Location(
        domain=self.domain,
        name='winterfell',
        location_type=self.location_type.name,
    )
    couch_location.save()
    saved_at = datetime.utcnow()

    synced = SQLLocation.objects.last()
    self.assertEqual(couch_location._id, synced.location_id)
    self.assertEqual('winterfell', synced.name)

    footprint = _location_footprint([synced])
    # Nothing changed since the save, so no sync should be needed.
    self.assertFalse(should_sync_locations(SyncLog(date=saved_at), footprint, self.user))

    # archive the location
    couch_location.archive()
    archived_at = datetime.utcnow()

    synced = SQLLocation.objects.last()
    footprint = _location_footprint([synced])
    # A sync log predating the archive must resync; one after it need not.
    self.assertTrue(should_sync_locations(SyncLog(date=saved_at), footprint, self.user))
    self.assertFalse(should_sync_locations(SyncLog(date=archived_at), footprint, self.user))
def handle(self, *args, **options):
    """Write a csv report of locations with hardcoded properties and tally
    how many problematic locations each domain has."""
    with open('location_results.csv', 'wb+') as out:
        writer = csv.writer(out, delimiter=',', quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow(['id', 'type', 'domain', 'property', 'value'])

        # De-duplicated ids from the by_type view.
        ids = list(set(Location.get_db().view(
            'locations/by_type',
            reduce=False,
            wrapper=lambda row: row['id'],
        ).all()))

        bad_domains = {}
        for raw_doc in iter_docs(Location.get_db(), ids):
            location = Location.get(raw_doc['_id'])
            if self.has_any_hardcoded_properties(location, writer):
                if location.domain not in bad_domains:
                    bad_domains[location.domain] = 0
                bad_domains[location.domain] += 1

    self.stdout.write("\nDomain stats:\n")
    for domain, count in bad_domains.iteritems():
        self.stdout.write("%s: %d" % (domain, count))
def get_non_archived_facilities_below(location):
    """Return couch Location docs for every unarchived FACILITY at or
    below ``location`` (the location itself included when it qualifies)."""
    facility_ids = (
        location.sql_location.get_descendants(include_self=True)
        .filter(is_archived=False, location_type__name="FACILITY")
        .values_list("location_id", flat=True)
    )
    docs = get_docs(Location.get_db(), facility_ids)
    return [Location.wrap(doc) for doc in docs]
def setUpClass(cls):
    """Create three locations, one supply-point case apiece, and remember
    the (location, supply_point) pairings for the tests."""
    cls.domain = 'supply-point-dbaccessors'
    cls.locations = [Location(domain=cls.domain) for _ in range(3)]
    Location.get_db().bulk_save(cls.locations)

    cls.supply_points = [
        CommCareCase(domain=cls.domain, type='supply-point',
                     location_id=location._id)
        for location in cls.locations
    ]

    by_id = {location._id: location for location in cls.locations}
    cls.location_supply_point_pairs = [
        (by_id[case.location_id], case) for case in cls.supply_points
    ]
    CommCareCase.get_db().bulk_save(cls.supply_points)
def handle(self, *args, **options):
    """Backfill legacy site codes from supply-point cases onto their
    locations, then report any locations sharing the same code."""
    try:
        domain = args[0]
    except IndexError:
        self.stderr.write('domain required\n')
        return

    self.println('Migrating...')
    for loc_id, case in get_supply_points_json_in_domain_by_location(domain):
        location = Location.get(loc_id)
        case_code = case.get('site_code', '')
        loc_code = getattr(location, 'site_code', '')
        # Only backfill; never clobber a code already on the location.
        if case_code and not loc_code:
            location.site_code = case_code
            location.save()
            self.println('migrated %s (%s)' % (location.name, location.site_code))

    self.println('Verifying code uniqueness...')
    all_codes = Location.get_db().view('commtrack/locations_by_code',
                                       startkey=[domain], endkey=[domain, {}])
    locs_by_code = map_reduce(lambda e: [(e['key'][-1].lower(), e['id'])],
                              data=all_codes)
    for code, loc_ids in locs_by_code.iteritems():
        if len(loc_ids) == 1:
            continue
        self.println('duplicate code [%s]' % code)
        for location in Location.view('_all_docs', keys=loc_ids, include_docs=True):
            self.println(' %s [%s]' % (location.name, location._id))
def load_locs_json(domain, selected_loc_id=None):
    """initialize a json location tree for drill-down controls on the
    client. tree is only partially initialized and branches will be
    filled in on the client via ajax.

    what is initialized:
     * all top level locs
     * if a 'selected' loc is provided, that loc and its complete ancestry
    """
    def loc_to_json(loc):
        return {"name": loc.name, "location_type": loc.location_type, "uuid": loc._id}

    tree = [loc_to_json(root) for root in root_locations(domain)]

    # if a location is selected, we need to pre-populate its location
    # hierarchy so that the data is available client-side to pre-populate
    # the drop-downs
    selected = Location.get_in_domain(domain, selected_loc_id)
    if selected:
        ancestry = list(Location.view("_all_docs", keys=selected.path, include_docs=True))
        cursor = {"children": tree}
        for ancestor in ancestry:
            # find the existing entry in the json tree for this ancestor,
            # then expand its children one level deeper
            matches = [node for node in cursor["children"] if node["uuid"] == ancestor._id]
            cursor = matches[0]
            cursor["children"] = [loc_to_json(child) for child in ancestor.children]
    return tree
def sync_facility_to_supply_point(domain, facility):
    """Create or update the supply point mirroring an external facility.

    Raises BadParentException when the facility's parent has no supply
    point, or when the sync would move an existing location.
    """
    supply_point = get_supply_point(domain, facility)
    facility_fields = {
        'domain': domain,
        'location_type': facility.type,
        'external_id': facility.code,
        'name': facility.name,
        'site_code': facility.code,  # todo: do they have a human readable code?
        'latitude': facility.latitude,
        'longitude': facility.longitude,
    }

    parent_sp = None
    if facility.parent_id:
        parent_sp = get_supply_point(domain, facility.parent_id)
        if not parent_sp:
            raise BadParentException('No matching supply point with code %s found' % facility.parent_id)

    if supply_point is None:
        # No supply point yet: create the location (under its parent when
        # one exists) and a fresh supply point for it.
        if parent_sp:
            facility_fields['parent'] = parent_sp.location
        new_loc = Location(**facility_fields)
        new_loc.save()
        return make_supply_point(domain, new_loc)

    existing_loc = supply_point.location
    if parent_sp and existing_loc.parent_id != parent_sp.location._id:
        raise BadParentException('You are trying to move a location. This is currently not supported.')
    if apply_updates(existing_loc, facility_fields):
        existing_loc.save()
    return supply_point
def total(self, form):
    """Yield the count of PPS locations under the form's region, keyed by
    the form's date."""
    region = Location.get(get_location_id_by_type(form=form, type=u'r\xe9gion'))
    # Use len() instead of calling .__len__() directly; filter_by_type is
    # materialized into a list first.
    pps_count = len(list(Location.filter_by_type(form.domain, 'PPS', region)))
    yield {
        'date': form_date(form),
        'value': pps_count,
    }
def main_context(self):
    """Extend the base context with domain-wide supply chain statistics."""
    try:
        facility_count = Location.filter_by_type_count(self.domain, 'FACILITY')
    except TypeError:
        facility_count = 0

    contacts = CommCareUser.by_domain(self.domain, reduce=True)
    web_users = WebUser.by_domain(self.domain, reduce=True)

    try:
        product_count = len(Product.by_domain(self.domain))
    except ResourceNotFound:
        product_count = 0

    context = super(GlobalStats, self).main_context
    context.update({
        'supply_points': len(list(Location.by_domain(self.domain))),
        'facilities': facility_count,
        'contacts': contacts[0]['value'] if contacts else 0,
        'web_users': web_users[0]['value'] if web_users else 0,
        'products': product_count,
        # TODO add next after the enlargement ILS migration
        'product_stocks': 0,
        'stock_transactions': 0,
        'inbound_messages': 0,
        'outbound_messages': 0,
    })
    return context
def create_locations(locations, types, parent):
    """Recursively create one Location per (name, children) pair.

    The head of ``types`` is used at this depth; recursion continues with
    the tail.  Relies on ``domain`` and ``locations_dict`` from the
    enclosing scope.
    """
    for name, children in locations:
        loc = Location(domain=domain, name=name, parent=parent,
                       location_type=types[0])
        loc.save()
        locations_dict[name] = loc.sql_location
        create_locations(children, types[1:], loc)
def testCreateVirtualFacility(self):
    """A freshly made supply point should sync to OpenLMIS both with and
    without the trailing flag."""
    loc = Location(site_code='1234', name='beavis', domain=self.domain,
                   location_type='chw')
    loc.save()
    supply_point = make_supply_point(self.domain, loc)
    self.assertTrue(sync_supply_point_to_openlmis(supply_point, self.api))
    self.assertTrue(sync_supply_point_to_openlmis(supply_point, self.api, False))
def handle(self, *args, **options):
    """Delete a location, all its descendants, and its linked docs.

    Deletion order matters: descendants first, then linked docs, and the
    original location last so the task can be resumed if interrupted.
    """
    try:
        loc_uuid = args[0]
    except IndexError:
        self.stderr.write('location uuid required\n')
        return
    try:
        loc = Location.get(loc_uuid)
        if not loc or loc.doc_type != 'Location':
            raise ValueError
    except Exception:
        self.stderr.write('doc [%s] does not appear to be a location\n' % loc_uuid)
        return
    self.db = get_db()
    # All locations whose path starts with this one's path, i.e. the
    # location itself plus every descendant.
    startkey = [loc.domain] + loc.path
    locs = Location.view('locations/hierarchy', startkey=startkey, endkey=startkey + [{}], reduce=False, include_docs=True)
    for k in locs:
        if k._id == loc._id:
            # don't delete orig loc until very end, so we can resume task if interrupted
            continue
        self.delete_doc(k, loc)
    # Docs linked directly to this location.
    startkey = [loc.domain, loc._id]
    linked = self.db.view('locations/linked_docs', startkey=startkey, endkey=startkey + [{}], include_docs=True)
    for k in linked:
        self.delete_doc(k['doc'], loc)
    self.delete_doc(loc, loc)
def _create_data(self, domain_name, i):
    """Create one of each commtrack/messaging model in ``domain_name`` so
    deletion tests have data to operate on; ``i`` keeps names/prefixes
    unique across calls."""
    product = Product(domain=domain_name, name='test-{}'.format(i))
    product.save()
    location = Location(domain=domain_name, site_code='testcode-{}'.format(i), name='test-{}'.format(i), location_type='facility')
    location.save()
    report = StockReport.objects.create(
        type='balance',
        domain=domain_name,
        form_id='fake',
        date=datetime.utcnow(),
        server_date=datetime.utcnow(),
    )
    # Stock transaction tied to the location's linked supply point case.
    StockTransaction.objects.create(
        report=report,
        product_id=product.get_id,
        sql_product=SQLProduct.objects.get(product_id=product.get_id),
        section_id='stock',
        type='stockonhand',
        case_id=location.linked_supply_point().get_id,
        stock_on_hand=100)
    # One row per messaging model.
    SMS.objects.create(domain=domain_name)
    Call.objects.create(domain=domain_name)
    SQLLastReadMessage.objects.create(domain=domain_name)
    ExpectedCallback.objects.create(domain=domain_name)
    PhoneNumber.objects.create(domain=domain_name, is_two_way=False, pending_verification=False)
    # A messaging event with one sub-event attached.
    event = MessagingEvent.objects.create(
        domain=domain_name,
        date=datetime.utcnow(),
        source=MessagingEvent.SOURCE_REMINDER,
        content_type=MessagingEvent.CONTENT_SMS,
        status=MessagingEvent.STATUS_COMPLETED)
    MessagingSubEvent.objects.create(
        parent=event,
        date=datetime.utcnow(),
        recipient_type=MessagingEvent.RECIPIENT_CASE,
        content_type=MessagingEvent.CONTENT_SMS,
        status=MessagingEvent.STATUS_COMPLETED)
    SelfRegistrationInvitation.objects.create(
        domain=domain_name,
        phone_number='999123',
        token=uuid.uuid4().hex,
        expiration_date=datetime.utcnow().date(),
        created_date=datetime.utcnow())
    # A backend plus its domain mapping and invitation.
    backend = SQLMobileBackend.objects.create(domain=domain_name, is_global=False)
    SQLMobileBackendMapping.objects.create(
        domain=domain_name,
        backend_type=SQLMobileBackend.SMS,
        prefix=str(i),
        backend=backend)
    MobileBackendInvitation.objects.create(domain=domain_name, backend=backend)
def test_archive(self):
    """Archiving a location archives its children; unarchiving restores
    them all."""
    child_state = make_loc('teststate', type='state', parent=self.user.location)
    child_state.save()
    count_before = len(list(Location.by_domain(self.domain.name)))

    root = self.user.location
    root.archive()
    # Both the root and its child are hidden from domain queries.
    self.assertEqual(len(list(Location.by_domain(self.domain.name))), count_before - 2)
    self.assertEqual(len(Location.root_locations(self.domain.name)), 0)

    root.unarchive()
    # Everything comes back, including the child.
    self.assertEqual(len(list(Location.by_domain(self.domain.name))), count_before)
    self.assertEqual(len(Location.root_locations(self.domain.name)), 1)
def location_edit(request, domain, loc_id=None):
    """Render and process the create/edit form for a single location."""
    parent_id = request.GET.get('parent')
    if loc_id:
        try:
            location = Location.get(loc_id)
        except ResourceNotFound:
            raise Http404()
    else:
        # Creating a new location, optionally under ?parent=<id>.
        location = Location(domain=domain, parent=parent_id)

    if request.method == "POST":
        form = LocationForm(location, request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Location saved!')
            redirect_to = '%s?%s' % (
                reverse('manage_locations', kwargs={'domain': domain}),
                urllib.urlencode({'selected': form.location._id}),
            )
            return HttpResponseRedirect(redirect_to)
    else:
        form = LocationForm(location)

    return render(request, 'locations/manage/location.html', {
        'domain': domain,
        'api_root': reverse('api_dispatch_list', kwargs={'domain': domain, 'resource_name': 'location', 'api_name': 'v0.3'}),
        'location': location,
        'hierarchy': location_hierarchy_config(domain),
        'form': form,
    })
def purge_locations(domain):
    """
    Delete all location data associated with <domain>.

    This means Locations, SQLLocations, LocationTypes, and anything which
    has a ForeignKey relationship to SQLLocation (as of 2015-03-02, this
    includes only StockStates and some custom stuff).
    """
    # Couch side: bulk-delete every location doc in the domain.
    location_ids = set([r['id'] for r in Location.get_db().view(
        'locations/by_type',
        reduce=False,
        startkey=[domain],
        endkey=[domain, {}],
    ).all()])
    iter_bulk_delete(Location.get_db(), location_ids)

    # SQL side: remove each location and its supply point case, if any.
    for loc in SQLLocation.objects.filter(domain=domain).iterator():
        if loc.supply_point_id:
            case = CommCareCase.get(loc.supply_point_id)
            case.delete()
        loc.delete()

    db = Domain.get_db()
    domain_obj = Domain.get_by_name(domain)  # cached lookup is fast but stale
    domain_json = db.get(domain_obj._id)  # get latest raw, unwrapped doc
    # Strip legacy location-type definitions off the domain doc itself.
    domain_json.pop('obsolete_location_types', None)
    domain_json.pop('location_types', None)
    db.save_doc(domain_json)
def make_loc(code, name, domain, type, metadata=None, parent=None):
    """Create and save a Location, requiring that its LocationType exists.

    The LocationType lookup raises if the type hasn't been set up, so bad
    fixtures fail fast.
    """
    name = name or code
    LocationType.objects.get(domain=domain, name=type)
    location = Location(site_code=code, name=name, domain=domain,
                        location_type=type, parent=parent)
    location.metadata = metadata or {}
    location.save()
    return location
def load_locs_json(domain, selected_loc_id=None):
    """initialize a json location tree for drill-down controls on the
    client. tree is only partially initialized and branches will be
    filled in on the client via ajax.

    what is initialized:
     * all top level locs
     * if a 'selected' loc is provided, that loc and its complete ancestry
    """
    def loc_to_json(loc):
        return {
            'name': loc.name,
            'location_type': loc.location_type,
            'uuid': loc._id,
        }

    tree = [loc_to_json(root) for root in root_locations(domain)]

    if selected_loc_id:
        # Pre-populate the selected location's hierarchy so the data is
        # available client-side to pre-populate the drop-downs.
        selected = Location.get(selected_loc_id)
        lineage = list(Location.view('_all_docs', keys=selected.path, include_docs=True))
        cursor = {'children': tree}
        for ancestor in lineage:
            # find the existing entry in the json tree for this ancestor,
            # then expand its children one level deeper
            matches = [node for node in cursor['children'] if node['uuid'] == ancestor._id]
            cursor = matches[0]
            cursor['children'] = [loc_to_json(child) for child in ancestor.children]
    return tree
def _create_data(self, domain_name, i):
    """Create a product, a facility location with a supply point case, and
    one stock report/transaction pair in ``domain_name``; ``i`` keeps the
    names unique across calls."""
    product = Product(domain=domain_name, name='test-{}'.format(i))
    product.save()
    location = Location(
        domain=domain_name,
        site_code='testcode-{}'.format(i),
        name='test-{}'.format(i),
        location_type='facility'
    )
    location.save()
    SupplyPointCase.create_from_location(domain_name, location)
    report = StockReport.objects.create(
        type='balance',
        domain=domain_name,
        form_id='fake',
        date=datetime.utcnow()
    )
    # Transaction is keyed to the supply point case created above.
    StockTransaction.objects.create(
        report=report,
        product_id=product.get_id,
        sql_product=SQLProduct.objects.get(product_id=product.get_id),
        section_id='stock',
        type='stockonhand',
        case_id=location.linked_supply_point().get_id,
        stock_on_hand=100
    )
def _delete_location_id(self, loc_uuid):
    """Delete a location, all its descendants, and any linked docs.

    The original location is removed last — and only when every linked-doc
    delete succeeded — so an interrupted run can be resumed safely.
    """
    try:
        loc = Location.get(loc_uuid)
        if not loc or loc.doc_type != 'Location':
            raise ValueError
    except Exception:
        self.stderr.write('doc [%s] does not appear to be a location\n' % loc_uuid)
        return
    self.db = get_db()
    # The location itself plus every descendant under its path.
    startkey = [loc.domain] + loc.path
    locs = Location.view('locations/hierarchy', startkey=startkey, endkey=startkey + [{}], reduce=False, include_docs=True)
    for k in locs:
        if k._id == loc._id:
            # don't delete orig loc until very end, so we can resume task if interrupted
            continue
        self.delete_doc(k, loc)
    # Docs linked directly to this location.
    startkey = [loc.domain, loc._id]
    linked = self.db.view('locations/linked_docs', startkey=startkey, endkey=startkey + [{}], include_docs=True)
    success = True
    for k in linked:
        # NOTE(review): `and` short-circuits — once one delete fails, the
        # remaining linked docs are not attempted; confirm that is intended.
        success = success and self.delete_doc(k['doc'], loc)
    if success:
        self.println('deleted location %s (%s)' % (loc._id, loc.name))
        if not self.dryrun:
            self.db.delete_doc(loc)
    else:
        self.stderr.write('not deleting %s because there were errors' % loc._id)
def handle(self, domain_name, **options):
    """Repair couch location lineages that disagree with their SQL mirror.

    SQLLocation is treated as the source of truth; mismatched couch docs
    are listed and, after confirmation (skipped with --noinput), bulk-saved.
    """
    domain = Domain.get_by_name(domain_name)
    if not domain:
        print(u'domain with name "{}" not found'.format(domain_name))
        return
    sql_location_qs = SQLLocation.objects.filter(domain=domain_name)
    print('checking {} locations for issues'.format(sql_location_qs.count()))
    couch_locations_to_save = []
    for sql_location in sql_location_qs:
        if sql_location.lineage != sql_location.couch_location.lineage:
            print('would change lineage of {} from {} to {}'.format(
                sql_location.name,
                '-->'.join(sql_location.couch_location.lineage),
                '-->'.join(sql_location.lineage),
            ))
            # Copy the SQL lineage onto the couch doc; saved in bulk below.
            sql_location.couch_location.lineage = sql_location.lineage
            couch_locations_to_save.append(sql_location.couch_location.to_json())
    if couch_locations_to_save:
        if not options['noinput']:
            confirm = raw_input(
                u"""
                Would you like to commit these changes? {} locations will be affected. (y/n)
                """.format(len(couch_locations_to_save))
            )
            if confirm != 'y':
                print("\n\t\taborted")
                return
        print(u"Committing changes")
        Location.get_db().bulk_save(couch_locations_to_save)
        print("Operation completed")
    else:
        print('no issues found')
def populate_report_data(start_date, end_date, domain, runner, locations=None, strict=True):
    """Build warehouse report data for the domain, resumably.

    Facilities are processed first (in chunks of 5, each chunk as a celery
    chain), then non-facility org units (chunks of 50) are rolled up from
    the facility rows.  ``runner.location`` marks where a previous run
    stopped; processing fast-forwards to that location and the marker is
    cleared once everything completes.
    """
    # first populate all the warehouse tables for all facilities
    # hard coded to know this is the first date with data
    start_date = max(start_date, default_start_date())
    # For QA purposes generate reporting data for only some small part of data.
    if not ILSGatewayConfig.for_domain(domain).all_stock_data:
        if locations is None:
            locations = _get_test_locations(domain)
        facilities = filter(
            lambda location: location.location_type == 'FACILITY',
            locations)
        non_facilities_types = ['DISTRICT', 'REGION', 'MSDZONE', 'MOHSW']
        non_facilities = []
        for location_type in non_facilities_types:
            non_facilities.extend(
                filter(
                    lambda location: location.location_type == location_type,
                    locations))
    else:
        facilities = Location.filter_by_type(domain, 'FACILITY')
        non_facilities = list(Location.filter_by_type(domain, 'DISTRICT'))
        non_facilities += list(Location.filter_by_type(domain, 'REGION'))
        non_facilities += list(Location.filter_by_type(domain, 'MSDZONE'))
        non_facilities += list(Location.filter_by_type(domain, 'MOHSW'))

    if runner.location:
        # Resume support: drop everything before the location the previous
        # run stopped at.  A non-facility marker means facilities already
        # finished, so they are skipped entirely.
        if runner.location.location_type.name.upper() != 'FACILITY':
            facilities = []
            non_facilities = itertools.dropwhile(
                lambda location: location.location_id != runner.location.location_id,
                non_facilities
            )
        else:
            facilities = itertools.dropwhile(
                lambda location: location.location_id != runner.location.location_id,
                facilities
            )

    facilities_chunked_list = chunked(facilities, 5)
    for chunk in facilities_chunked_list:
        # Run each chunk as a sequential celery chain and block on it.
        res = chain(process_facility_warehouse_data.si(fac, start_date, end_date, runner) for fac in chunk)()
        res.get()

    non_facilities_chunked_list = chunked(non_facilities, 50)
    # then populate everything above a facility off a warehouse table
    for chunk in non_facilities_chunked_list:
        res = chain(
            process_non_facility_warehouse_data.si(org, start_date, end_date, runner, strict)
            for org in chunk
        )()
        res.get()
    # All done: clear the resume marker.
    runner.location = None
    runner.save()
def testSimpleName(self):
    """A plain name is lowercased and underscored to form the site code."""
    loc = Location(name="Some Location", domain=self.domain, location_type="type")
    loc.save()
    self.assertEqual('some_location', loc.site_code)
def testOtherCharacters(self):
    """Accents, symbols and parens are normalized out of the site code."""
    loc = Location(name=u"Somé$ #Location (Old)", domain=self.domain, location_type="type")
    loc.save()
    self.assertEqual('some_location_old', loc.site_code)
def get_non_archived_facilities_below(location):
    """Fetch all unarchived FACILITY locations at or below ``location``."""
    descendants = location.sql_location.get_descendants(include_self=True)
    active_facility_ids = descendants.filter(
        is_archived=False,
        location_type__name='FACILITY',
    ).values_list('location_id', flat=True)
    return [Location.wrap(doc)
            for doc in get_docs(Location.get_db(), active_facility_ids)]
def get_location_hierarchy_by_id(location_id, domain, CCT_only=False):
    """Return location ids for ``location_id`` and its descendants, or for
    the whole domain when no id is given.

    :param location_id: root location id, or None/'' for the whole domain
    :param domain: domain to search in
    :param CCT_only: when True, keep only locations passing _is_location_CCT
    :returns: list of location ids (the given root first, when it qualifies)
    """
    # Truthiness covers both None and the empty string.
    if not location_id:
        return [location.get_id for location in Location.by_domain(domain)
                if not CCT_only or _is_location_CCT(location)]
    user_location = Location.get(location_id)
    locations = [location.get_id for location in user_location.descendants
                 if not CCT_only or _is_location_CCT(location)]
    # The root itself goes first, subject to the same CCT filter.
    if not CCT_only or _is_location_CCT(user_location):
        locations.insert(0, user_location.get_id)
    return locations
def get_data(self):
    """Yield a reporting/nonreporting status row per supply point under
    the active location, based on whether any stock report whose xmlns
    matches one of the relevant forms fell inside the date range."""
    # todo: this will probably have to paginate eventually
    if self.all_relevant_forms:
        sp_ids = get_relevant_supply_point_ids(
            self.domain,
            self.active_location,
        )
        form_xmlnses = [form['xmlns'] for form in self.all_relevant_forms.values()]
        # Map supply point id -> location id, then prefetch the locations.
        spoint_loc_map = {
            doc['_id']: doc['location_id']
            for doc in iter_docs(SupplyPointCase.get_db(), sp_ids)
        }
        locations = {
            doc['_id']: Location.wrap(doc)
            for doc in iter_docs(Location.get_db(), spoint_loc_map.values())
        }
        for spoint_id, loc_id in spoint_loc_map.items():
            loc = locations[loc_id]
            # Form ids for this supply point inside the window, newest first.
            form_ids = StockReport.objects.filter(
                stocktransaction__case_id=spoint_id
            ).exclude(
                date__lte=self.start_date
            ).exclude(
                date__gte=self.end_date
            ).values_list(
                'form_id',
                flat=True
            ).order_by('-date').distinct()  # not truly distinct due to ordering
            matched = False
            for form_id in form_ids:
                try:
                    if XFormInstance.get(form_id).xmlns in form_xmlnses:
                        yield {
                            'loc_id': loc._id,
                            'loc_path': loc.path,
                            'name': loc.name,
                            'type': loc.location_type,
                            'reporting_status': 'reporting',
                            'geo': loc._geopoint,
                        }
                        matched = True
                        break
                except ResourceNotFound:
                    logging.error('Stock report for location {} in {} references non-existent form {}'.format(
                        loc._id, loc.domain, form_id
                    ))
            # No matching form found: the supply point is non-reporting.
            if not matched:
                yield {
                    'loc_id': loc._id,
                    'loc_path': loc.path,
                    'name': loc.name,
                    'type': loc.location_type,
                    'reporting_status': 'nonreporting',
                    'geo': loc._geopoint,
                }
def setUp(self):
    """Look up the fixture domain, locations, and users shared by tests."""
    self.domain = Domain.get_by_name(TEST_DOMAIN)
    for attr, code in (('loc1', 'loc1'), ('loc2', 'loc2'), ('dis', 'dis1')):
        setattr(self, attr, Location.by_site_code(TEST_DOMAIN, code))
    for attr, username in (
        ('user_fac1', 'stella'),
        ('user_fac2', 'bella'),
        ('user_dis', 'trella'),
        ('msd_user', 'msd_person'),
    ):
        setattr(self, attr, CommCareUser.get_by_username(username))
def _create_data(self, domain_name):
    """Create one of each ILS model in ``domain_name`` — product, facility
    location, and one row per report/alert/status table — so deletion
    tests have data to operate on."""
    product = Product(domain=domain_name, name='test-product')
    product.save()
    location = Location(domain=domain_name, site_code='testcode', name='test1', location_type='facility')
    location.save()
    # Remember the location id so tests can assert on it later.
    self.locations[domain_name] = location.get_id
    DeliveryGroupReport.objects.create(location_id=location.get_id, quantity=1, message='test', delivery_group='A')
    SupplyPointWarehouseRecord.objects.create(
        supply_point=location.get_id,
        create_date=datetime.utcnow())
    Alert.objects.create(text='test', expires=datetime.utcnow(), date=datetime.utcnow(), location_id=location.get_id)
    # An organization summary with one group summary hanging off it.
    organization_summary = OrganizationSummary.objects.create(
        date=datetime.utcnow(), location_id=location.get_id)
    GroupSummary.objects.create(org_summary=organization_summary)
    ProductAvailabilityData.objects.create(product=product.get_id, date=datetime.utcnow(), location_id=location.get_id)
    SupplyPointStatus.objects.create(location_id=location.get_id, status_type='del_fac', status_value='received')
    # These reference the SQL mirror of the location rather than its id.
    HistoricalLocationGroup.objects.create(
        location_id=location.sql_location,
        group='A',
        date=datetime.utcnow().date())
    ReportRun.objects.create(domain=domain_name, start=datetime.utcnow(), end=datetime.utcnow(), start_run=datetime.utcnow())
    ILSNotes.objects.create(location=location.sql_location, domain=domain_name, user_name='test', date=datetime.utcnow(), text='test')
    SupervisionDocument.objects.create(domain=domain_name, document='test', data_type='test', name='test')
def make_loc(code, name=None, domain=TEST_DOMAIN, type=TEST_LOCATION_TYPE, parent=None, is_archived=False):
    """Convenience factory: build and save a Location, defaulting the
    display name to the site code."""
    loc = Location(
        site_code=code,
        name=name or code,
        domain=domain,
        location_type=type,
        parent=parent,
        is_archived=is_archived,
    )
    loc.save()
    return loc
def make_loc(code, name=None, domain=TEST_DOMAIN, type=TEST_LOCATION_TYPE, parent=None):
    """Build and save a Location, creating its LocationType on demand.

    Refuses to run against a nonexistent domain so tests fail loudly
    instead of writing orphaned docs.
    """
    if not Domain.get_by_name(domain):
        raise AssertionError("You can't make a location on a fake domain")
    LocationType.objects.get_or_create(
        domain=domain,
        name=type,
        defaults={'administrative': False},
    )
    loc = Location(site_code=code, name=name or code, domain=domain,
                   location_type=type, parent=parent)
    loc.save()
    return loc
def make_loc(code, name, domain, type, parent=None):
    """Create and save a Location (and its LocationType if needed) with an
    empty product list on the SQL mirror."""
    name = name or code
    # Ensure the LocationType exists; the returned objects were unused, so
    # the tuple unpacking has been dropped.
    LocationType.objects.get_or_create(domain=domain, name=type)
    loc = Location(site_code=code, name=name, domain=domain,
                   location_type=type, parent=parent)
    loc.save()
    sql_location = loc.sql_location
    # Explicitly start with no products assigned.
    sql_location.products = []
    sql_location.save()
    return loc
def testSimpleName(self):
    """An ordinary name becomes a lowercased, underscored site code."""
    loc = Location(
        name="Some Location",
        domain=self.domain.name,
        location_type="type"
    )
    loc.save()
    self.assertEqual('some_location', loc.site_code)
def testOtherCharacters(self):
    """Unicode accents and punctuation are normalized out of the site code."""
    loc = Location(
        name=u"Somé$ #Location (Old)",
        domain=self.domain.name,
        location_type="type"
    )
    loc.save()
    self.assertEqual('some_location_old', loc.site_code)
def rows(self):
    """Build one table row per child of the selected location: SOH
    reporting percentages, a stockout percentage, and a without-stock
    percentage per product."""
    rows = []
    if not self.config['products']:
        # No explicit selection: use every product in the domain, by code.
        # NOTE(review): without flat=True this yields 1-tuples, while the
        # ProductAvailabilityData filter below does product=product —
        # confirm that branch matches rows as intended.
        prd_id = SQLProduct.objects.filter(domain=self.config['domain'])\
            .order_by('code').values_list('product_id')
    else:
        prd_id = self.config['products']
    if self.config['location_id']:
        location = Location.get(self.config['location_id'])
        for loc in location.children:
            # First org summary for this child inside the date range.
            org_summary = OrganizationSummary.objects.filter(date__range=(self.config['startdate'],
                                                                          self.config['enddate']),
                                                             supply_point=loc._id)[0]
            soh_data = GroupSummary.objects.get(title=SupplyPointStatusTypes.SOH_FACILITY,
                                                org_summary=org_summary)
            facs = Location.filter_by_type(self.config['domain'], 'FACILITY', loc)
            # Fall back to 1 to avoid dividing by zero with no facilities.
            facs_count = (float(len(list(facs))) or 1)
            soh_on_time = soh_data.on_time * 100 / facs_count
            soh_late = soh_data.late * 100 / facs_count
            soh_not_responding = soh_data.not_responding * 100 / facs_count
            fac_ids = get_relevant_supply_point_ids(self.config['domain'], loc)
            # Count of zero-or-less stock transactions for the month,
            # normalized by the facility count.
            stockouts = (StockTransaction.objects.filter(
                case_id__in=fac_ids,
                quantity__lte=0,
                report__date__month=int(self.config['month']),
                report__date__year=int(self.config['year'])).count() or 0) / facs_count
            url = make_url(
                StockOnHandReport,
                self.config['domain'],
                '?location_id=%s&month=%s&year=%s',
                (loc._id, self.config['month'], self.config['year']))
            row_data = [
                link_format(loc.name, url),
                format_percent(soh_on_time),
                format_percent(soh_late),
                format_percent(soh_not_responding),
                format_percent(stockouts)
            ]
            for product in prd_id:
                ps = ProductAvailabilityData.objects.filter(
                    supply_point=loc._id,
                    product=product,
                    date=self.config['startdate'])
                if ps:
                    row_data.append(format_percent(ps[0].without_stock * 100 / float(ps[0].total)))
                else:
                    row_data.append("<span class='no_data'>None</span>")
            rows.append(row_data)
    return rows
def _create_location_from_supply_point(self, supply_point, location):
    """Return the couch Location matching an external supply point,
    creating it (and its SQL product assignments) when it doesn't exist.

    :param supply_point: external API object carrying code/type/name/etc.
    :param location: default parent for a newly created location
    """
    try:
        sql_location = SQLLocation.objects.get(domain=self.domain, site_code=supply_point.code)
        return Loc.get(sql_location.location_id)
    except SQLLocation.DoesNotExist:
        parent = location
        if supply_point.code in TEACHING_HOSPITAL_MAPPING:
            # Teaching hospitals hang off a specially mapped parent rather
            # than the default one passed in.
            parent = self._sync_parent(TEACHING_HOSPITAL_MAPPING[supply_point.code]['parent_external_id'])
        new_location = Loc(parent=parent)
        new_location.domain = self.domain
        new_location.location_type = supply_point.type
        new_location.name = supply_point.name
        new_location.site_code = supply_point.code
        if supply_point.supervised_by:
            new_location.metadata['supervised_by'] = supply_point.supervised_by
        new_location.save()
        # Assign the supply point's products on the SQL mirror.
        sql_loc = new_location.sql_location
        sql_loc.products = SQLProduct.objects.filter(
            domain=self.domain, code__in=supply_point.products)
        sql_loc.save()
        return new_location
def location_fixture_generator(user, version, last_sync=None):
    """
    By default this will generate a fixture for the users location and it's
    "footprint", meaning the path to a root location through parent hierarchies.

    There is an admin feature flag that will make this generate a fixture with
    ALL locations for the domain.
    """
    project = user.project
    if (not project or not project.commtrack_enabled or not project.commtrack_settings
            or not project.commtrack_settings.sync_location_fixtures):
        # Location fixture syncing is disabled for this project.
        return []
    if toggles.SYNC_ALL_LOCATIONS.enabled(user.domain):
        location_db = _location_footprint(Location.by_domain(user.domain))
    else:
        locations = []
        if user.location:
            # add users location (and ancestors) to fixture
            locations.append(user.location)
            # optionally add all descendants as well
            if user.location.location_type_object.view_descendants:
                locations += user.location.descendants
        if user.project.supports_multiple_locations_per_user:
            # this might add duplicate locations but we filter that out later
            locations += user.locations
        location_db = _location_footprint(locations)
    if not should_sync_locations(last_sync, location_db):
        # Nothing changed since the last sync; skip the fixture entirely.
        return []
    root = ElementTree.Element('fixture', {'id': 'commtrack:locations', 'user_id': user.user_id})
    loc_types = project.location_types
    type_to_slug_mapping = dict((ltype.name, ltype.code) for ltype in loc_types)

    def location_type_lookup(location_type):
        # Fall back to a slugified type name when no code is configured.
        return type_to_slug_mapping.get(location_type, unicode_slug(location_type))

    if toggles.SYNC_ALL_LOCATIONS.enabled(user.domain):
        root_locations = Location.root_locations(user.domain)
    else:
        root_locations = filter(lambda loc: loc.parent_id is None, location_db.by_id.values())
    if not root_locations:
        return []
    else:
        _append_children(root, location_db, root_locations, location_type_lookup)
    return [root]
def _create_data(self, domain_name):
    """Seed a product, a facility location, and an in-charge user for the domain."""
    Product(domain=domain_name, name="test-product").save()

    facility = Location(
        domain=domain_name,
        site_code="testcode",
        name="test1",
        location_type="facility",
    )
    facility.save()
    self.locations[domain_name] = facility.sql_location

    in_charge = CommCareUser.create(
        domain=domain_name,
        username="******".format(domain_name),
        password="******",
    )
    FacilityInCharge.objects.create(
        user_id=in_charge.get_id,
        location=facility.sql_location,
    )
def location_dump(request, domain):
    """Return a CSV attachment listing every location's UUID, type and SMS code."""
    rows = Location.view(
        'commtrack/locations_by_code',
        startkey=[domain],
        endkey=[domain, {}],
    )
    loc_ids = [row['id'] for row in rows]

    resp = HttpResponse(content_type='text/csv')
    resp['Content-Disposition'] = 'attachment; filename="locations_%s.csv"' % domain

    writer = csv.writer(resp)
    writer.writerow(['UUID', 'Location Type', 'SMS Code'])
    # iter_docs fetches the docs in bulk rather than one GET per id
    for raw_doc in iter_docs(Location.get_db(), loc_ids):
        location = Location.wrap(raw_doc)
        writer.writerow([location._id, location.location_type, location.site_code])
    return resp
def test_id_of_invalid_parent_type(self):
    """Importing a village under a state (an invalid parent type) must fail cleanly."""
    # state can't have outlet as child
    payload = {
        'name': 'oops',
        'parent_site_code': self.test_state.site_code,
    }
    count_before = len(list(Location.by_domain(self.domain.name)))

    result = import_location(self.domain.name, 'village', payload)

    # no location created, count unchanged, and the error names the cause
    self.assertEqual(result['id'], None)
    count_after = len(list(Location.by_domain(self.domain.name)))
    self.assertEqual(count_after, count_before)
    self.assertTrue('Invalid parent type' in result['message'])
def make_loc(code, name=None, domain=TEST_DOMAIN, type=TEST_LOCATION_TYPE, parent=None):
    """Create and save a couch Location; ``name`` falls back to ``code``."""
    loc = Location(
        site_code=code,
        name=name or code,
        domain=domain,
        location_type=type,
        parent=parent,
    )
    loc.save()
    return loc
def _get_district_location(self, domain, sp):
    """Find the DISTRICT location named ``sp`` in ``domain``.

    The first query uses a prefix-style endkey (``sp + "z"``); if that is
    ambiguous (more than one hit), it retries with an exact-name key.
    NOTE(review): assumes at least one match exists -- an empty result
    would raise IndexError; confirm callers guarantee this.
    """
    matches = Location.view(
        'locations/by_name',
        startkey=[domain, "DISTRICT", sp],
        endkey=[domain, "DISTRICT", sp + "z"],
        reduce=False,
        include_docs=True,
    )
    if len(matches) > 1:
        # prefix search matched several districts; narrow to the exact name
        matches = Location.view(
            'locations/by_name',
            startkey=[domain, "DISTRICT", sp],
            endkey=[domain, "DISTRICT", sp],
            reduce=False,
            include_docs=True,
        )
    return matches[0]
def test_id_of_invalid_parent_type(self):
    """Importing an outlet under a state (an invalid parent type) must fail cleanly."""
    # state can't have outlet as child
    parent = make_loc('sillyparents', type='state')
    payload = {
        'name': 'oops',
        'outlet_type': 'SHG',
        'parent_id': parent._id,
    }
    count_before = len(list(Location.by_domain(self.domain.name)))

    result = import_location(self.domain.name, 'outlet', payload)

    # no location created, count unchanged, and the error names the cause
    self.assertEqual(result['id'], None)
    count_after = len(list(Location.by_domain(self.domain.name)))
    self.assertEqual(count_after, count_before)
    self.assertTrue('Invalid parent type' in result['message'])
def locations_fix(domain):
    """Repair supply points for administrative locations in ``domain``.

    For each country/region/district location: if a linked supply point
    exists, clear its external id; otherwise create one via a stub couch
    Location keyed to the existing location id.
    """
    admin_locations = SQLLocation.objects.filter(
        domain=domain,
        location_type__in=['country', 'region', 'district'],
    )
    for sql_loc in admin_locations:
        supply_point = Location.get(sql_loc.location_id).linked_supply_point()
        if supply_point:
            supply_point.external_id = None
            supply_point.save()
        else:
            # no supply point yet -- build a minimal stand-in location so the
            # helper creates one tied to the existing location id
            stub = Location(
                _id=sql_loc.location_id,
                name=sql_loc.name,
                domain=domain,
            )
            SupplyPointCase.get_or_create_by_location(stub)
def make_loc(code, name=None, domain=TEST_DOMAIN, type=TEST_LOCATION_TYPE, parent=None):
    """Create and save a couch Location, ensuring its LocationType exists first.

    ``name`` falls back to ``code``.
    """
    # make sure the SQL location type row is present before saving the location
    LocationType.objects.get_or_create(domain=domain, name=type)
    loc = Location(
        site_code=code,
        name=name or code,
        domain=domain,
        location_type=type,
        parent=parent,
    )
    loc.save()
    return loc
def test_import_with_invalid_location_id(self):
    """
    When importing with a invalid location id, import_location
    should not create a new location
    """
    payload = {
        'location_id': 'i-am-invalid',
        'name': 'importedloc',
    }

    outcome = import_location(self.domain.name, 'state', payload)

    self.assertIsNone(outcome['id'])
    # the bogus id must not have been persisted either
    with self.assertRaises(ResourceNotFound):
        Location.get('i-am-invalid')
def get_location_hierarchy_by_id(location_id, domain, CCT_only=False):
    """Return the ids of a location subtree, or of the whole domain.

    :param location_id: root location id; a falsy value (``None`` or ``''``)
        means "every location in the domain".
    :param domain: domain name used to scope the lookup.
    :param CCT_only: when True, include only locations for which
        ``_is_location_CCT`` is truthy.
    :returns: list of location ids. For a specific root, the root's own id
        (if it passes the filter) comes first, followed by its descendants.
    """
    # Idiomatic truthiness test replaces `is None or len(...) == 0`,
    # which behaves identically for None and the empty string.
    if not location_id:
        return [
            location.get_id
            for location in Location.by_domain(domain)
            if not CCT_only or _is_location_CCT(location)
        ]

    user_location = Location.get(location_id)
    locations = [
        location.get_id
        for location in user_location.descendants
        if not CCT_only or _is_location_CCT(location)
    ]
    # the root itself is subject to the same CCT filter as its descendants
    if not CCT_only or _is_location_CCT(user_location):
        locations.insert(0, user_location.get_id)
    return locations
def testCreateSupplyPointFromFacility(self):
    """Syncing one facility yields exactly one location wired to its supply point."""
    [facility, _other] = self._get_facilities()
    self.assertEqual(0, len(list(Location.by_domain(TEST_DOMAIN))))

    supply_point = sync_facility_to_supply_point(TEST_DOMAIN, facility)

    domain_locs = list(Location.by_domain(TEST_DOMAIN))
    self.assertEqual(1, len(domain_locs))
    [location] = domain_locs

    # the location mirrors the facility
    self.assertEqual(facility.name, location.name)
    self.assertEqual(facility.code, location.external_id)

    # the supply point mirrors the facility and points at the location
    self.assertEqual(facility.name, supply_point.name)
    self.assertEqual(facility.code, supply_point.external_id)
    self.assertEqual(supply_point.location._id, location._id)
def get_fixture(self, user, location_id):
    """
    Generate a fixture representation of all locations available to the user
    <fixture id="fixture:user-locations" user_id="4ce8b1611c38e953d3b3b84dd3a7ac18">
        <locations>
            <location name="Location 1" id="1039d1611c38e953d3b3b84ddc01d93"
            <!-- ... -->
        </locations>
    </fixture>
    """
    location = Location.get(location_id)

    location_element = ElementTree.Element('location', attrib={
        'name': location.name,
        'id': location.get_id,
    })
    locations_element = ElementTree.Element('locations')
    locations_element.append(location_element)

    root = ElementTree.Element('fixture', attrib={
        'id': self.id,
        'user_id': user._id,
    })
    root.append(locations_element)
    return root