def _update_location_data(self, user):
    """Apply the form's primary/assigned location selections to *user*.

    CommCare users carry their locations directly; web users carry them on
    their domain membership, so the domain must be passed through.
    """
    primary_id = self.cleaned_data['primary_location']
    assigned_ids = self.cleaned_data['assigned_locations']
    if user.is_commcare_user():
        # update the primary location first, then reconcile the full set
        if primary_id != user.location_id:
            if primary_id:
                user.set_location(Location.get(primary_id))
            else:
                user.unset_location()
        if set(assigned_ids) != set(user.assigned_location_ids):
            user.reset_locations(assigned_ids)
    else:
        membership = user.get_domain_membership(self.domain)
        if primary_id != membership.location_id:
            if primary_id:
                user.set_location(self.domain, Location.get(primary_id))
            else:
                user.unset_location(self.domain)
        if set(assigned_ids) != set(membership.assigned_location_ids):
            user.reset_locations(self.domain, assigned_ids)
def locations(self):
    """Return the facility Locations visible from the active location.

    Country-level users see every facility; region and district users see
    the facilities beneath them; anyone else sees only their own location.
    """
    active = self.active_location
    if not active:
        return []
    loc_type = active.location_type
    if loc_type == 'country':
        facility_iter = self.facilities()
    elif loc_type == 'region':
        facility_iter = self.facilities(
            parent__parent__location_id=active._id)
    elif loc_type == 'district':
        facility_iter = self.facilities(parent__location_id=active._id)
    else:
        return [active]
    return [Location.get(facility.location_id) for facility in facility_iter]
def test_import_with_invalid_location_id(self):
    """
    When importing with a invalid location id, import_location
    should not create a new location
    """
    bad_id = 'i-am-invalid'
    payload = {
        'location_id': bad_id,
        'name': 'importedloc',
    }
    outcome = import_location(self.domain.name, 'state', payload)
    self.assertIsNone(outcome['id'])
    with self.assertRaises(ResourceNotFound):
        Location.get(bad_id)
def _create_location_from_supply_point(self, supply_point, location):
    """Return the Location matching *supply_point*'s code, creating it
    (with products) under *location* — or under a mapped teaching-hospital
    parent — when it does not exist yet."""
    try:
        existing = SQLLocation.objects.get(domain=self.domain,
                                           site_code=supply_point.code)
    except SQLLocation.DoesNotExist:
        pass
    else:
        return Loc.get(existing.location_id)

    parent = location
    if supply_point.code in TEACHING_HOSPITAL_MAPPING:
        # teaching hospitals hang off a specific mapped parent
        parent = self._sync_parent(
            TEACHING_HOSPITAL_MAPPING[supply_point.code]['parent_external_id'])

    new_loc = Loc(parent=parent)
    new_loc.domain = self.domain
    new_loc.location_type = supply_point.type
    new_loc.name = supply_point.name
    new_loc.site_code = supply_point.code
    if supply_point.supervised_by:
        new_loc.metadata['supervised_by'] = supply_point.supervised_by
    new_loc.save()

    sql_loc = new_loc.sql_location
    sql_loc.products = SQLProduct.objects.filter(
        domain=self.domain, code__in=supply_point.products)
    sql_loc.save()
    return new_loc
def update_product_availability_facility_data(org_summary):
    """Ensure a ProductAvailabilityData row exists for every unarchived
    product at the facility, seeding new rows from the latest prior report
    (or defaulting to "no data")."""
    # product availability
    facility = Location.get(docid=org_summary.location_id)
    assert facility.location_type == "FACILITY"
    active_products = SQLProduct.objects.filter(domain=facility.domain,
                                                is_archived=False)
    for product in active_products:
        availability, is_new = ProductAvailabilityData.objects.get_or_create(
            product=product.product_id,
            location_id=facility._id,
            date=org_summary.date)
        if is_new:
            # set defaults
            availability.total = 1
            earlier = ProductAvailabilityData.objects.filter(
                product=product.product_id,
                location_id=facility._id,
                date__lt=org_summary.date,
                total=1)
            if earlier.count():
                latest = earlier.latest('date')
                availability.with_stock = latest.with_stock
                availability.without_stock = latest.without_stock
                availability.without_data = latest.without_data
            else:
                # otherwise we use the defaults
                availability.with_stock = 0
                availability.without_stock = 0
                availability.without_data = 1
            availability.save()
        assert (availability.with_stock + availability.without_stock +
                availability.without_data) == 1, \
            "bad product data config for %s" % availability
def handle(self, *args, **options):
    """Migrate users off the legacy 'commtrack_location' doc field onto
    the set_location() API, saving the cleaned docs in batches."""
    self.stdout.write("...\n")
    relevant_ids = {
        row['id']
        for row in CouchUser.get_db().view(
            'users/by_username',
            reduce=False,
        ).all()
    }
    pending = []
    for user_doc in iter_docs(CouchUser.get_db(), relevant_ids):
        if 'commtrack_location' not in user_doc:
            continue
        user = CommCareUser.get(user_doc['_id'])
        try:
            legacy_location = Location.get(user['commtrack_location'])
        except ResourceNotFound:
            # if there was bad data in there before, we can ignore it
            continue
        user.set_location(legacy_location)
        del user_doc['commtrack_location']
        pending.append(user_doc)
        if len(pending) > 500:
            CouchUser.get_db().bulk_save(pending)
            pending = []
    if pending:
        CouchUser.get_db().bulk_save(pending)
def _delete_location_id(self, loc_uuid):
    """Delete the location *loc_uuid*, its descendant locations, and all
    docs linked to it.

    The original location doc is only removed once everything else has been
    deleted successfully (and not at all on a dry run), so an interrupted
    task can be resumed by re-running it.
    """
    try:
        loc = Location.get(loc_uuid)
        if not loc or loc.doc_type != 'Location':
            raise ValueError
    except Exception:
        self.stderr.write('doc [%s] does not appear to be a location\n' % loc_uuid)
        return
    self.db = get_db()
    # delete descendant locations first, via the hierarchy view
    startkey = [loc.domain] + loc.path
    locs = Location.view('locations/hierarchy', startkey=startkey,
                         endkey=startkey + [{}], reduce=False,
                         include_docs=True)
    for k in locs:
        if k._id == loc._id:
            # don't delete orig loc until very end, so we can resume task if interrupted
            continue
        self.delete_doc(k, loc)
    # then delete all docs linked to this location
    startkey = [loc.domain, loc._id]
    linked = self.db.view('locations/linked_docs', startkey=startkey,
                          endkey=startkey + [{}], include_docs=True)
    success = True
    for k in linked:
        success = success and self.delete_doc(k['doc'], loc)
    if success:
        self.println('deleted location %s (%s)' % (loc._id, loc.name))
        if not self.dryrun:
            self.db.delete_doc(loc)
    else:
        self.stderr.write('not deleting %s because there were errors' % loc._id)
def location(self):
    """Return the Location for ``self.location_id``; None when the id is
    unset or the doc no longer exists."""
    loc_id = self.location_id
    if loc_id is not None:
        try:
            return Location.get(loc_id)
        except ResourceNotFound:
            pass
    return None
def data_providers(self):
    """Assemble the report data providers for the configured location:
    a facility gets the detailed single-facility panels; region-level
    locations get RR status; everything else gets RR reporting history."""
    config = self.report_config
    if not config['location_id']:
        return []
    loc_type = Location.get(config['location_id']).location_type
    if loc_type == 'FACILITY':
        return [
            InventoryHistoryData(config=config),
            RandRHistory(config=config),
            Notes(config=config),
            RecentMessages(config=config),
            RegistrationData(config=dict(loc_type='FACILITY', **config),
                             css_class='row_chart_all'),
            RegistrationData(config=dict(loc_type='DISTRICT', **config),
                             css_class='row_chart_all'),
            RegistrationData(config=dict(loc_type='REGION', **config),
                             css_class='row_chart_all'),
        ]
    providers = [RandRSubmissionData(config=config, css_class='row_chart_all')]
    if loc_type in ['REGION', 'MSDZONE', 'MOHSW']:
        providers.append(RRStatus(config=config, css_class='row_chart_all'))
    else:
        providers.append(RRReportingHistory(config=config,
                                            css_class='row_chart_all'))
    return providers
def _get_test_locations(domain):
    """Fetch the couch Location docs for the ILS test facilities in *domain*."""
    from custom.ilsgateway.tasks import ILS_FACILITIES
    matching = (
        SQLLocation.objects
        .filter(domain=domain, external_id__in=ILS_FACILITIES)
        .order_by('id')
        .only('location_id')
    )
    return [Location.get(sql_loc.location_id) for sql_loc in matching]
def needed_status_types(org_summary):
    """Return the NEEDED_STATUS_TYPES that are valid for the summary's
    facility on the summary's date."""
    facility = Location.get(org_summary.location_id)
    result = []
    for status_type in const.NEEDED_STATUS_TYPES:
        if _is_valid_status(facility, org_summary.date, status_type):
            result.append(status_type)
    return result
def handle(self, *args, **options):
    """Copy legacy site codes from supply-point cases onto their locations,
    then report any locations in the domain that share a (case-insensitive)
    site code."""
    try:
        domain = args[0]
    except IndexError:
        self.stderr.write('domain required\n')
        return
    self.println('Migrating...')
    for loc_id, case in get_supply_points_json_in_domain_by_location(domain):
        loc = Location.get(loc_id)
        old_code = case.get('site_code', '')
        new_code = getattr(loc, 'site_code', '')
        # only copy over when the location doesn't already have a code
        if old_code and not new_code:
            loc.site_code = old_code
            loc.save()
            self.println('migrated %s (%s)' % (loc.name, loc.site_code))
    self.println('Verifying code uniqueness...')
    all_codes = Location.get_db().view('commtrack/locations_by_code',
                                       startkey=[domain],
                                       endkey=[domain, {}])
    # group location ids by lower-cased site code to spot duplicates
    locs_by_code = map_reduce(lambda e: [(e['key'][-1].lower(), e['id'])],
                              data=all_codes)
    for code, loc_ids in locs_by_code.iteritems():
        if len(loc_ids) == 1:
            continue
        self.println('duplicate code [%s]' % code)
        locs = Location.view('_all_docs', keys=loc_ids, include_docs=True)
        for loc in locs:
            self.println(' %s [%s]' % (loc.name, loc._id))
def handle(self, *args, **options):
    """Write a CSV of locations carrying hardcoded properties, then print
    a per-domain count of the problematic locations found."""
    with open('location_results.csv', 'wb+') as csvfile:
        writer = csv.writer(
            csvfile,
            delimiter=',',
            quotechar='|',
            quoting=csv.QUOTE_MINIMAL
        )
        writer.writerow(['id', 'type', 'domain', 'property', 'value'])

        location_ids = list(set(Location.get_db().view(
            'locations/by_type',
            reduce=False,
            wrapper=lambda row: row['id'],
        ).all()))

        problem_counts = {}
        for raw_doc in iter_docs(Location.get_db(), location_ids):
            location = Location.get(raw_doc['_id'])
            if self.has_any_hardcoded_properties(location, writer):
                if location.domain in problem_counts:
                    problem_counts[location.domain] += 1
                else:
                    problem_counts[location.domain] = 1

        self.stdout.write("\nDomain stats:\n")
        for domain, count in problem_counts.iteritems():
            self.stdout.write("%s: %d" % (domain, count))
def api_query_supply_point(request, domain):
    """Look up a single location by id, or search locations by name prefix.

    Returns JSON: a ``{'id', 'name'}`` payload (or ``null`` when the given
    id does not resolve to a location), or a list of such payloads for a
    name query.
    """
    from couchdbkit.exceptions import ResourceNotFound

    id = request.GET.get('id')
    query = request.GET.get('name', '')

    def loc_to_payload(loc):
        return {'id': loc._id, 'name': loc.name}

    if id:
        # bug fix: Location.get raises ResourceNotFound for unknown ids, so
        # the old `if loc:` check never ran and a bad id caused a 500; now
        # an unknown id produces the intended null payload.
        try:
            loc = Location.get(id)
        except ResourceNotFound:
            loc = None
        payload = loc_to_payload(loc) if loc else None
    else:
        LIMIT = 100
        loc_types = [loc_type.name for loc_type in
                     Domain.get_by_name(domain).commtrack_settings.location_types
                     if not loc_type.administrative]

        def get_locs(type):
            # TODO use ES instead?
            q = query.lower()
            startkey = [domain, type, q]
            endkey = [domain, type, q + 'zzzzzz']
            return Location.view('locations/by_name',
                                 startkey=startkey,
                                 endkey=endkey,
                                 limit=LIMIT,
                                 include_docs=True)

        locs = sorted(itertools.chain(*(get_locs(loc_type) for loc_type in loc_types)),
                      key=lambda e: e.name)[:LIMIT]
        payload = map(loc_to_payload, locs)

    return HttpResponse(json.dumps(payload), 'text/json')
def attach_locations(xform, cases): """ Given a received form and cases, update the location of that form to the location of its cases (if they have one). """ # todo: this won't change locations if you are trying to do that via XML. # this is mainly just a performance thing so you don't have to do extra lookups # every time you touch a case if cases: found_loc = None for case in cases: loc = None if not case.location_: if case.type == const.SUPPLY_POINT_CASE_TYPE: loc_id = getattr(case, 'location_id', None) if loc_id: loc = Location.get(loc_id) case.bind_to_location(loc) elif case.type == const.REQUISITION_CASE_TYPE: req = RequisitionCase.wrap(case._doc) prod = req.get_product_case() if prod and prod.location_ and prod.location_ != case.location_: case.location_ = prod.location_ case.save()
def location_edit(request, domain, loc_id=None):
    """Render the location add/edit page and process its form submission."""
    parent_id = request.GET.get('parent')
    if loc_id:
        try:
            location = Location.get(loc_id)
        except ResourceNotFound:
            raise Http404()
    else:
        location = Location(domain=domain, parent=parent_id)

    if request.method == "POST":
        form = LocationForm(location, request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Location saved!')
            redirect_url = reverse('manage_locations', kwargs={'domain': domain})
            query_string = urllib.urlencode({'selected': form.location._id})
            return HttpResponseRedirect('%s?%s' % (redirect_url, query_string))
    else:
        form = LocationForm(location)

    return render(request, 'locations/manage/location.html', {
        'domain': domain,
        'api_root': reverse('api_dispatch_list',
                            kwargs={'domain': domain,
                                    'resource_name': 'location',
                                    'api_name': 'v0.3'}),
        'location': location,
        'hierarchy': location_hierarchy_config(domain),
        'form': form,
    })
def post(self, request, *args, **kwargs):
    """Create a CommCareUser from the posted form, optionally assign a
    location from the querystring, and kick off SMS verification."""
    if self.new_commcare_user_form.is_valid() and self.custom_data.is_valid():
        username = self.new_commcare_user_form.cleaned_data['username']
        password = self.new_commcare_user_form.cleaned_data['password']
        phone_number = self.new_commcare_user_form.cleaned_data['phone_number']

        user = CommCareUser.create(
            self.domain,
            username,
            password,
            phone_number=phone_number,
            device_id="Generated from HQ",
            user_data=self.custom_data.get_data_to_save(),
        )

        if 'location_id' in request.GET:
            try:
                # bug fix: the id lives in request.GET (checked just above);
                # indexing the request object itself was a TypeError
                loc = Location.get(request.GET['location_id'])
            except ResourceNotFound:
                raise Http404()
            # don't allow assigning a location from another domain
            if loc.domain != self.domain:
                raise Http404()
            user.set_location(loc)

        if phone_number:
            initiate_sms_verification_workflow(user, phone_number)

        user_json = {'user_id': user._id, 'text': user.username_in_report}
        return self.render_json_response({
            "status": "success",
            "user": user_json
        })
    return self.render_form("failure")
def get_fixture(self, user, location_id):
    """
    Generate a fixture representation of all locations available to the user

    <fixture id="fixture:user-locations" user_id="...">
        <locations>
            <location name="Location 1" id="..."/>
        </locations>
    </fixture>
    """
    location = Location.get(location_id)
    location_element = ElementTree.Element('location', attrib={
        'name': location.name,
        'id': location.get_id,
    })
    locations_element = ElementTree.Element('locations')
    locations_element.append(location_element)
    root = ElementTree.Element('fixture', attrib={
        'id': self.id,
        'user_id': user._id,
    })
    root.append(locations_element)
    return root
def total(self, form):
    """Yield a single {'date', 'value'} dict: the form's date and the
    number of PPS locations under the form's region."""
    region = Location.get(get_location_id_by_type(form=form, type=u'r\xe9gion'))
    # idiom fix: len() over the materialized list instead of calling
    # __len__ directly
    count = len(list(Location.filter_by_type(form.domain, 'PPS', region)))
    yield {
        'date': form_date(form),
        'value': count
    }
def api_query_supply_point(request, domain):
    """Look up one location by id, or search locations by name prefix.

    Returns JSON payloads of {'id', 'name'}; an explicit id that does not
    resolve gets a 404 with a JSON error message.
    """
    id = request.GET.get('id')
    query = request.GET.get('name', '')

    def loc_to_payload(loc):
        return {'id': loc._id, 'name': loc.name}

    if id:
        try:
            loc = Location.get(id)
            return HttpResponse(json.dumps(loc_to_payload(loc)), 'text/json')
        except ResourceNotFound:
            # bug fix: 'text/json' was previously passed to json.dumps (as
            # its skipkeys positional argument) instead of to
            # HttpResponseNotFound; also fixed the "with is" typo.
            return HttpResponseNotFound(
                json.dumps({'message': 'no location with id %s found' % id}),
                'text/json')
    else:
        LIMIT = 100
        loc_types = [loc_type.name for loc_type in
                     Domain.get_by_name(domain).commtrack_settings.location_types
                     if not loc_type.administrative]

        def get_locs(type):
            # TODO use ES instead?
            q = query.lower()
            startkey = [domain, type, q]
            endkey = [domain, type, q + 'zzzzzz']
            return Location.view(
                'locations/by_name',
                startkey=startkey,
                endkey=endkey,
                limit=LIMIT,
                reduce=False,
                include_docs=True,
            )

        locs = sorted(itertools.chain(*(get_locs(loc_type) for loc_type in loc_types)),
                      key=lambda e: e.name)[:LIMIT]
        return HttpResponse(json.dumps(map(loc_to_payload, locs)), 'text/json')
def process_transfers(E, transfers):
    """Build a <transfer> XML element (stock section) from a list of
    transfers; returns None when the list is empty."""
    if not transfers:
        return
    first = transfers[0]
    attr = {
        'section-id': 'stock',
    }
    if first.action == const.RequisitionActions.RECEIPTS:
        attr['src'] = first.case_id
        supply_point = Location.get(first.location_id).linked_supply_point()
        attr['dest'] = supply_point._id
    else:
        receiving = first.action in [
            const.StockActions.RECEIPTS,
            const.RequisitionActions.FULFILL
        ]
        here = 'dest' if receiving else 'src'
        attr[here] = first.case_id
        # there not supported yet
    if first.subaction:
        attr['type'] = first.subaction
    return E.transfer(
        attr,
        *[tx.to_xml() for tx in transfers]
    )
def save(self, user):
    """Make the selected supply point the user's sole commtrack location."""
    commtrack_user = CommTrackUser.wrap(user.to_json())
    supply_point_id = self.cleaned_data['supply_point']
    if not supply_point_id:
        return
    chosen_location = Location.get(supply_point_id)
    commtrack_user.clear_locations()
    commtrack_user.add_location(chosen_location)
def process_transfers(E, transfers):
    """Serialize *transfers* into one <transfer> element in the stock
    section; empty input yields None."""
    if not transfers:
        return
    head = transfers[0]
    attr = {'section-id': 'stock'}
    if head.action == const.RequisitionActions.RECEIPTS:
        attr['src'] = head.case_id
        attr['dest'] = Location.get(head.location_id).linked_supply_point()._id
    else:
        if head.action in [const.StockActions.RECEIPTS,
                           const.RequisitionActions.FULFILL]:
            attr['dest'] = head.case_id
        else:
            attr['src'] = head.case_id
        # there not supported yet
    if head.subaction:
        attr['type'] = head.subaction
    return E.transfer(attr, *[tx.to_xml() for tx in transfers])
def update_product_availability_facility_data(org_summary):
    """Create/seed ProductAvailabilityData rows for every active product at
    the facility covered by *org_summary*, carrying forward the most recent
    prior report when one exists."""
    # product availability
    facility = Location.get(docid=org_summary.location_id)
    assert facility.location_type == "FACILITY"
    for product in SQLProduct.objects.filter(domain=facility.domain,
                                             is_archived=False):
        row, just_created = ProductAvailabilityData.objects.get_or_create(
            product=product.product_id,
            location_id=facility._id,
            date=org_summary.date,
        )
        if just_created:
            # set defaults
            row.total = 1
            history = ProductAvailabilityData.objects.filter(
                product=product.product_id,
                location_id=facility._id,
                date__lt=org_summary.date,
                total=1,
            )
            if history.count():
                most_recent = history.latest('date')
                row.with_stock = most_recent.with_stock
                row.without_stock = most_recent.without_stock
                row.without_data = most_recent.without_data
            else:
                # otherwise we use the defaults
                row.with_stock = 0
                row.without_stock = 0
                row.without_data = 1
            row.save()
        assert (row.with_stock + row.without_stock + row.without_data) == 1, \
            "bad product data config for %s" % row
def reparent_linked_docs(loc_id):
    """Rewrite the cached 'location_' path on every doc linked to location
    *loc_id* after that location has been moved to a new parent, then clear
    the location's post-move flag."""
    # NOTE this updates linked docs for this location and all its descendant locations, but we never
    # actually update the paths of the descendant locations themselves. we get around this right now
    # by disallowing re-parenting of non-leaf locs. and if we did allow it, we'd want to update the
    # paths of the descendant locations immediately instead of in a batch job
    db = get_db()
    loc = Location.get(loc_id)

    startkey = [loc.domain, loc._id]
    linked_docs = [
        row['doc'] for row in db.view('locations/linked_docs',
                                      startkey=startkey,
                                      endkey=startkey + [{}],
                                      include_docs=True)
    ]
    for doc in linked_docs:
        cur_path = doc['location_']
        # keep whatever trailed this loc in the old path and splice it onto
        # the location's new path
        descendant_suffix = cur_path[cur_path.index(loc._id) + 1:]
        doc['location_'] = loc.path + descendant_suffix
        db.save_doc(doc)
    # TODO is it faster to save all docs in bulk?
    # think there's a slight possibility that newly submitted docs could slip through the cracks
    # but probably only if this task runs very shortly after the location is moved. don't worry
    # about this for now... maybe have a more comprehensive clean-up job (expensive) that runs
    # less frequently

    # clear the marker so this location isn't reprocessed
    delattr(loc, 'flag_post_move')
    loc.save()
def load_locs_json(domain, selected_loc_id=None):
    """initialize a json location tree for drill-down controls on the
    client. tree is only partially initialized and branches will be filled
    in on the client via ajax. what is initialized: * all top level locs
    * if a 'selected' loc is provided, that loc and its complete ancestry
    """
    def loc_to_json(loc):
        return {"name": loc.name, "location_type": loc.location_type, "uuid": loc._id}

    loc_json = [loc_to_json(root) for root in root_locations(domain)]

    if not selected_loc_id:
        return loc_json

    # if a location is selected, we need to pre-populate its location hierarchy
    # so that the data is available client-side to pre-populate the drop-downs
    selected = Location.get(selected_loc_id)
    lineage = list(Location.view("_all_docs", keys=selected.path, include_docs=True))
    parent = {"children": loc_json}
    for ancestor in lineage:
        # find existing entry in the json tree that corresponds to this loc
        this_loc = [node for node in parent["children"]
                    if node["uuid"] == ancestor._id][0]
        this_loc["children"] = [loc_to_json(child) for child in ancestor.children]
        parent = this_loc
    return loc_json
def load_locs_json(domain, selected_loc_id=None):
    """initialize a json location tree for drill-down controls on the
    client. tree is only partially initialized and branches will be filled
    in on the client via ajax. what is initialized: * all top level locs
    * if a 'selected' loc is provided, that loc and its complete ancestry
    """
    def loc_to_json(loc):
        return {
            'name': loc.name,
            'location_type': loc.location_type,
            'uuid': loc._id,
        }

    loc_json = [loc_to_json(root) for root in root_locations(domain)]

    # if a location is selected, we need to pre-populate its location hierarchy
    # so that the data is available client-side to pre-populate the drop-downs
    if selected_loc_id:
        selected = Location.get(selected_loc_id)
        ancestors = list(Location.view('_all_docs', keys=selected.path,
                                       include_docs=True))
        parent = {'children': loc_json}
        for ancestor in ancestors:
            # find existing entry in the json tree that corresponds to this loc
            this_loc = [node for node in parent['children']
                        if node['uuid'] == ancestor._id][0]
            this_loc['children'] = [loc_to_json(c) for c in ancestor.children]
            parent = this_loc
    return loc_json
def handle(self, *args, **options):
    """Dump locations with hardcoded properties to location_results.csv and
    print a per-domain tally of how many were found."""
    with open('location_results.csv', 'wb+') as out:
        writer = csv.writer(out, delimiter=',', quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow(['id', 'type', 'domain', 'property', 'value'])

        unique_ids = list(set(Location.get_db().view(
            'locations/by_type',
            reduce=False,
            wrapper=lambda row: row['id'],
        ).all()))

        counts_by_domain = {}
        for raw_doc in iter_docs(Location.get_db(), unique_ids):
            location = Location.get(raw_doc['_id'])
            if self.has_any_hardcoded_properties(location, writer):
                if location.domain not in counts_by_domain:
                    counts_by_domain[location.domain] = 0
                counts_by_domain[location.domain] += 1

        self.stdout.write("\nDomain stats:\n")
        for domain, count in counts_by_domain.iteritems():
            self.stdout.write("%s: %d" % (domain, count))
def handle(self, *args, **options):
    """Delete the location given as the first command-line argument, along
    with its descendant locations and all docs linked to any of them."""
    try:
        loc_uuid = args[0]
    except IndexError:
        self.stderr.write('location uuid required\n')
        return
    try:
        loc = Location.get(loc_uuid)
        if not loc or loc.doc_type != 'Location':
            raise ValueError
    except Exception:
        self.stderr.write('doc [%s] does not appear to be a location\n' % loc_uuid)
        return
    self.db = get_db()
    # first delete every descendant of this location
    startkey = [loc.domain] + loc.path
    locs = Location.view('locations/hierarchy', startkey=startkey,
                         endkey=startkey + [{}], reduce=False,
                         include_docs=True)
    for k in locs:
        if k._id == loc._id:
            # don't delete orig loc until very end, so we can resume task if interrupted
            continue
        self.delete_doc(k, loc)
    # then all docs linked to the location itself
    startkey = [loc.domain, loc._id]
    linked = self.db.view('locations/linked_docs', startkey=startkey,
                          endkey=startkey + [{}], include_docs=True)
    for k in linked:
        self.delete_doc(k['doc'], loc)
    # and finally the location doc
    self.delete_doc(loc, loc)
def location_edit(request, domain, loc_id=None):
    """Display the location form, or process its submission on POST."""
    parent_id = request.GET.get('parent')
    if not loc_id:
        location = Location(domain=domain, parent=parent_id)
    else:
        try:
            location = Location.get(loc_id)
        except ResourceNotFound:
            raise Http404

    if request.method == "POST":
        form = LocationForm(location, request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Location saved!')
            return HttpResponseRedirect('%s?%s' % (
                reverse('manage_locations', kwargs={'domain': domain}),
                urllib.urlencode({'selected': form.location._id}),
            ))
    else:
        form = LocationForm(location)

    context = {
        'domain': domain,
        'api_root': reverse('api_dispatch_list', kwargs={
            'domain': domain,
            'resource_name': 'location',
            'api_name': 'v0.3',
        }),
        'location': location,
        'form': form,
    }
    return render(request, 'locations/manage/location.html', context)
def update_product_availability_facility_data(org_summary):
    """Create ProductAvailabilityData rows for each product in the
    facility's domain, defaulting new rows from the most recent earlier
    report (or to "no data" when none exists)."""
    # product availability
    facility = Location.get(docid=org_summary.supply_point)
    assert facility.location_type == "FACILITY"
    for product_id in Product.ids_by_domain(facility.domain):
        row, is_new = ProductAvailabilityData.objects.get_or_create(
            product=product_id,
            supply_point=facility._id,
            date=org_summary.date)
        if is_new:
            # set defaults
            row.total = 1
            earlier = ProductAvailabilityData.objects.filter(
                product=product_id,
                supply_point=facility._id,
                date__lt=org_summary.date)
            if earlier.count():
                newest = earlier.order_by("-date")[0]
                row.with_stock = newest.with_stock
                row.without_stock = newest.without_stock
                row.without_data = newest.without_data
            else:
                # otherwise we use the defaults
                row.with_stock = 0
                row.without_stock = 0
                row.without_data = 1
            row.save()
        assert (row.with_stock + row.without_stock + row.without_data) == 1, \
            "bad product data config"
def handle(self, *args, **options):
    """Move users off the legacy 'commtrack_location' doc field onto the
    set_locations() API, saving cleaned docs in batches of ~500."""
    self.stdout.write("...\n")
    relevant_ids = {row['id'] for row in CouchUser.get_db().view(
        'users/by_username',
        reduce=False,
    ).all()}
    queued = []
    for user_doc in iter_docs(CouchUser.get_db(), relevant_ids):
        if 'commtrack_location' not in user_doc:
            continue
        user = CommCareUser.get(user_doc['_id'])
        try:
            legacy_location = Location.get(user['commtrack_location'])
        except ResourceNotFound:
            # if there was bad data in there before, we can ignore it
            continue
        user.set_locations([legacy_location])
        del user_doc['commtrack_location']
        queued.append(user_doc)
        if len(queued) > 500:
            CouchUser.get_db().bulk_save(queued)
            queued = []
    if queued:
        CouchUser.get_db().bulk_save(queued)
def calculate_form_data(self, form):
    """Extract the report row fields (case, service type, location names,
    amount due, form id) from a form JSON blob, substituting EMPTY_FIELD
    whenever a piece of data is missing."""
    try:
        case_id = form["form"]["case"]["@case_id"]
        case = get_case_by_id(case_id)
    except KeyError:
        case = EMPTY_FIELD

    # amount due comes from whichever of the two amount fields is present
    amount_due = EMPTY_FIELD
    if form["form"].get("registration_amount", None) is not None:
        amount_due = form["form"].get("registration_amount", None)
    elif form["form"].get("immunization_amount", None) is not None:
        amount_due = form["form"].get("immunization_amount", None)

    service_type = form["form"].get("service_type", EMPTY_FIELD)
    form_id = form["_id"]

    location_name = EMPTY_FIELD
    location_parent_name = EMPTY_FIELD
    location_id = form["form"].get("location_id", None)
    if location_id is not None:
        location = Location.get(location_id)
        location_name = location.name
        location_parent = location.parent
        # walk up the lineage to the nearest district/lga ancestor; a direct
        # 'state' parent (or running out of ancestors) leaves the parent
        # name empty
        if location_parent is not None and location_parent.location_type != 'state':
            while location_parent is not None and location_parent.location_type not in ('district', 'lga'):
                location_parent = location_parent.parent
            location_parent_name = location_parent.name if location_parent is not None else EMPTY_FIELD

    return {'case': case, 'service_type': service_type, 'location_name': location_name,
            'location_parent_name': location_parent_name, 'amount_due': amount_due,
            'form_id': form_id}
def get_wrapped_owner(owner_id):
    """
    Returns the wrapped user or group object for a given ID, or None if the
    id isn't a known owner type.
    """
    if not owner_id:
        return None

    # locations are tried first; a miss falls through to the user db
    try:
        return Location.get(owner_id)
    except ResourceNotFound:
        pass

    try:
        owner_doc = user_db().get(owner_id)
    except ResourceNotFound:
        return None

    owner_cls = {
        'CommCareUser': CommCareUser,
        'WebUser': WebUser,
        'Group': Group,
    }.get(owner_doc['doc_type'])
    return owner_cls.wrap(owner_doc) if owner_cls else None
def to_fixture(self):
    """
    Generate a fixture representation of all locations available to the user

    <fixture id="fixture:user-locations" user_id="...">
        <locations>
            <location name="Location 1" id="..."/>
        </locations>
    </fixture>
    """
    location = Location.get(self.location_id)
    location_element = ElementTree.Element('location', attrib={
        'name': location.name,
        'id': location.get_id,
    })
    locations_element = ElementTree.Element('locations')
    locations_element.append(location_element)
    root = ElementTree.Element('fixture', attrib={
        'id': self.id,
        'user_id': self.user._id,
    })
    root.append(locations_element)
    return root
def location(self):
    """Return this object's Location, or None when no ``location_id``
    attribute is set or the referenced doc is gone."""
    if not hasattr(self, 'location_id'):
        return None
    try:
        return Location.get(self.location_id)
    except ResourceNotFound:
        return None
def reparent_linked_docs(loc_id):
    """Fix the cached 'location_' path on every doc linked to the moved
    location, then clear the location's post-move flag."""
    # NOTE this updates linked docs for this location and all its descendant
    # locations, but we never actually update the paths of the descendant
    # locations themselves. we get around this right now by disallowing
    # re-parenting of non-leaf locs. and if we did allow it, we'd want to
    # update the paths of the descendant locations immediately instead of
    # in a batch job
    db = get_db()
    loc = Location.get(loc_id)
    key = [loc.domain, loc._id]
    docs_to_fix = [row['doc'] for row in db.view('locations/linked_docs',
                                                 startkey=key,
                                                 endkey=key + [{}],
                                                 include_docs=True)]
    for doc in docs_to_fix:
        old_path = doc['location_']
        suffix = old_path[old_path.index(loc._id) + 1:]
        doc['location_'] = loc.path + suffix
        # TODO is it faster to save all docs in bulk?
        db.save_doc(doc)
    # think there's a slight possibility that newly submitted docs could slip
    # through the cracks but probably only if this task runs very shortly
    # after the location is moved. don't worry about this for now... maybe
    # have a more comprehensive clean-up job (expensive) that runs less
    # frequently
    delattr(loc, 'flag_post_move')
    loc.save()
def not_responding_facility(org_summary):
    """Record non-response GroupSummary rows for a facility's expected
    status types and raise the matching 'not responding' alerts."""
    assert Location.get(docid=org_summary.supply_point).location_type == "FACILITY"

    def needed_status_types(org_summary):
        # status types this facility was expected to report for this period
        return [status_type for status_type in NEEDED_STATUS_TYPES
                if _is_valid_status(org_summary.supply_point, org_summary.date,
                                    status_type)]

    # idiom fix: loop/comprehension variables renamed from `type`, which
    # shadowed the builtin
    for status_type in needed_status_types(org_summary):
        gsum, created = GroupSummary.objects.get_or_create(
            org_summary=org_summary, title=status_type)
        gsum.total = 1
        assert gsum.responded in (0, 1)
        if gsum.title == SupplyPointStatusTypes.SOH_FACILITY and not gsum.responded:
            # TODO: this might not be right unless we also clear it
            create_alert(org_summary.supply_point, org_summary.date,
                         'soh_not_responding', {'number': 1})
        elif gsum.title == SupplyPointStatusTypes.R_AND_R_FACILITY and not gsum.responded:
            # TODO: this might not be right unless we also clear it
            create_alert(org_summary.supply_point, org_summary.date,
                         'rr_not_responded', {'number': 1})
        elif gsum.title == SupplyPointStatusTypes.DELIVERY_FACILITY and not gsum.responded:
            # TODO: this might not be right unless we also clear it
            create_alert(org_summary.supply_point, org_summary.date,
                         'delivery_not_responding', {'number': 1})
        else:
            # not an expected / needed group. ignore for now
            pass
        gsum.save()
def save(self, user):
    """Point the user's commtrack locations at the chosen supply point,
    clearing any previous assignments first."""
    commtrack_user = CommTrackUser.wrap(user.to_json())
    chosen_id = self.cleaned_data['supply_point']
    if not chosen_id:
        return
    chosen_location = Location.get(chosen_id)
    commtrack_user.clear_locations()
    commtrack_user.add_location(chosen_location, create_sp_if_missing=True)
def get_location_by_type(form, type):
    """Walk the lineage of the form's location and return the first ancestor
    whose location type (lower-cased, spaces removed) equals *type*;
    implicitly returns None when no ancestor matches."""
    base = _get_location(form)
    for ancestor_id in base.lineage:
        ancestor = Location.get(ancestor_id)
        normalized = unicode(ancestor.location_type).lower().replace(" ", "")
        if normalized == type:
            return ancestor
def save(self, user):
    """Assign the selected supply point as the user's sole commtrack
    location (creating the supply point case if needed)."""
    wrapped = CommTrackUser.wrap(user.to_json())
    location_id = self.cleaned_data["supply_point"]
    if not location_id:
        return
    chosen_loc = Location.get(location_id)
    wrapped.clear_locations()
    wrapped.add_location(chosen_loc, create_sp_if_missing=True)
def create_mobile_worker(self, in_data):
    """Create a mobile worker from JSON posted by the client.

    Checks permission, munges the payload into Django-form POST data
    (prefixing custom fields), then validates the forms and creates the
    CommCareUser. Returns a JSON-serializable dict: either
    {'success', 'editUrl'} or {'error': message}.
    """
    if not self.can_add_extra_users:
        return {
            'error': _("No Permission."),
        }
    try:
        user_data = in_data['mobileWorker']
    except KeyError:
        return {
            'error': _("Please provide mobile worker data."),
        }
    try:
        form_data = {}
        # custom fields are posted under a prefix so the custom-data form
        # can pick them out of the POST dict
        for k, v in user_data.get('customFields', {}).items():
            form_data["{}-{}".format(CUSTOM_DATA_FIELD_PREFIX, k)] = v
        for f in [
                'username', 'password', 'first_name', 'last_name',
                'location_id'
        ]:
            form_data[f] = user_data[f]
        form_data['domain'] = self.domain
        # HACK: the forms below read from request.POST, so replace it
        # wholesale with the rebuilt payload
        self.request.POST = form_data
    except Exception as e:
        return {'error': _("Check your request: %s" % e)}
    if self.new_mobile_worker_form.is_valid() and self.custom_data.is_valid():
        username = self.new_mobile_worker_form.cleaned_data['username']
        password = self.new_mobile_worker_form.cleaned_data['password']
        first_name = self.new_mobile_worker_form.cleaned_data['first_name']
        last_name = self.new_mobile_worker_form.cleaned_data['last_name']
        location_id = self.new_mobile_worker_form.cleaned_data['location_id']
        couch_user = CommCareUser.create(
            self.domain,
            format_username(username, self.domain),
            password,
            device_id="Generated from HQ",
            first_name=first_name,
            last_name=last_name,
            user_data=self.custom_data.get_data_to_save(),
        )
        if location_id:
            couch_user.set_location(Location.get(location_id))
        return {
            'success': True,
            'editUrl': reverse(
                EditCommCareUserView.urlname,
                args=[self.domain, couch_user.userID])
        }
    return {
        'error': _("Forms did not validate"),
    }
def location_sync(self, ews_location):
    """Sync one EWS API location into HQ.

    When the location does not exist locally yet, it is created (recursively
    syncing its parent first when that parent is missing) along with its
    supply point. When it already exists, its basic fields are updated and
    its supply points get their product lists filled in.
    Returns the couch Location.
    """
    try:
        sql_loc = SQLLocation.objects.get(
            domain=self.domain,
            external_id=int(ews_location.id)
        )
        location = Loc.get(sql_loc.location_id)
    except SQLLocation.DoesNotExist:
        location = None

    if not location:
        if ews_location.parent_id:
            try:
                loc_parent = SQLLocation.objects.get(
                    external_id=ews_location.parent_id,
                    domain=self.domain
                )
                loc_parent_id = loc_parent.location_id
            except SQLLocation.DoesNotExist:
                # parent hasn't been synced yet: fetch it from the API and
                # recurse to create it first
                parent = self.endpoint.get_location(ews_location.parent_id)
                loc_parent = self.location_sync(Location(parent))
                loc_parent_id = loc_parent._id
            location = Loc(parent=loc_parent_id)
        else:
            # top-level location with no ancestry
            location = Loc()
            location.lineage = []
        self._set_location_properties(location, ews_location)
        location.save()
        self._set_up_supply_point(location, ews_location)
    else:
        location_dict = {
            'name': ews_location.name,
            'latitude': float(ews_location.latitude) if ews_location.latitude else None,
            'longitude': float(ews_location.longitude) if ews_location.longitude else None,
            'site_code': ews_location.code.lower(),
            'external_id': str(ews_location.id),
        }
        # only save when something actually changed
        if apply_updates(location, location_dict):
            location.save()
        for supply_point in ews_location.supply_points:
            sp = SupplyPointCase.view('hqcase/by_domain_external_id',
                                      key=[self.domain, str(supply_point.id)],
                                      reduce=False,
                                      include_docs=True,
                                      limit=1).first()
            if sp:
                sql_location = sp.location.sql_location
                sql_location.stocks_all_products = False
                if not sql_location.products:
                    sql_location.products = SQLProduct.objects.filter(
                        domain=self.domain,
                        code__in=supply_point.products
                    )
                sql_location.save()
    return location
def get_location_hierarchy_by_id(location_id, domain, CCT_only=False):
    """Return the ids of the locations rooted at ``location_id``.

    With a falsy ``location_id`` (None or empty string), returns every
    location id in ``domain``.  Otherwise returns the location's
    descendants, with the location itself first.  When ``CCT_only`` is
    True, only locations passing ``_is_location_CCT`` are included.
    """
    # Idiom fix: `not location_id` covers both None and the empty string,
    # replacing `location_id is None or len(location_id) == 0`.
    if not location_id:
        return [location.get_id for location in Location.by_domain(domain)
                if not CCT_only or _is_location_CCT(location)]
    user_location = Location.get(location_id)
    locations = [location.get_id for location in user_location.descendants
                 if not CCT_only or _is_location_CCT(location)]
    # The root location leads the list, subject to the same CCT filter.
    if not CCT_only or _is_location_CCT(user_location):
        locations.insert(0, user_location.get_id)
    return locations
def _get_location(form):
    """Resolve the location for a submitted form.

    Prefers an explicit ``location_id`` in the form body; otherwise falls
    back to the location of the user who submitted the form.
    """
    form_json = form.form
    if 'location_id' in form_json:
        return Location.get(form_json['location_id'])
    submitting_user = CommTrackUser.get(form['auth_context']['user_id'])
    return submitting_user.location
def location(self):
    """Return this object's Location document.

    Returns None when no ``location_id`` is set or when the referenced
    document no longer exists in couch.
    """
    from corehq.apps.locations.models import Location
    from couchdbkit.exceptions import ResourceNotFound
    loc_id = self.location_id
    if loc_id is not None:
        try:
            return Location.get(loc_id)
        except ResourceNotFound:
            pass
    return None
def save(self, user):
    """Apply the form's selected location to ``user``.

    NOTE: an empty selection clears the location associations, including
    those set in a domain with multiple locations configured.  That is
    acceptable for now because multi-location config is a not really
    supported special flag for IPM.
    """
    chosen_location_id = self.cleaned_data['location']
    if not chosen_location_id:
        user.unset_location()
        return
    # Avoid a redundant couch fetch/write when nothing changed.
    if chosen_location_id != user.location_id:
        user.set_location(Location.get(chosen_location_id))
def supply_point_to_json(supply_point):
    """Serialize a supply point into the JSON shape sent to the external
    system.

    Includes ``parentFacilityCode`` only when the supply point's location
    has a lineage (the first lineage entry appears to be the parent
    location id — confirm against Location.lineage ordering).
    """
    base = {
        'agentCode': supply_point.location.site_code,
        'agentName': supply_point.name,
        'active': not supply_point.closed,
    }
    # Idiom fix: truthiness check replaces `len(...) > 0`.
    if supply_point.location.lineage:
        parent_facility_code = Location.get(supply_point.location.lineage[0]).external_id
        base['parentFacilityCode'] = parent_facility_code
    # todo phone number
    return base
def __init__(self, domain, v):
    """Capture the domain and verified number, and resolve the owner's
    linked supply-point case when one is configured on the user."""
    self.domain = domain
    self.v = v
    self.C = domain.commtrack_settings
    self.location = None
    owner = v.owner
    if not isinstance(owner, CommCareUser):
        return
    linked_loc_id = owner.dynamic_properties().get('commtrack_location')
    if linked_loc_id:
        linked_loc = Location.get(linked_loc_id)
        self.location = get_supply_point(self.domain.name, loc=linked_loc)['case']
def test_import_coordinates(self):
    """Importing a location with lat/long should persist both coordinates."""
    payload = {
        'name': 'importedloc',
        'latitude': 55,
        'longitude': -55,
    }
    result = import_location(self.domain.name, 'state', payload)
    imported = Location.get(result['id'])
    self.assertEqual(payload['latitude'], imported.latitude)
    self.assertEqual(payload['longitude'], imported.longitude)
def save(self, user):
    """Replace the user's commtrack location with the supply point
    selected on the form, if any was chosen."""
    commtrack_user = CommTrackUser.wrap(user.to_json())
    selected_id = self.cleaned_data['supply_point']
    if not selected_id:
        return
    selected_loc = Location.get(selected_id)
    commtrack_user.clear_locations()
    commtrack_user.add_location(selected_loc, create_sp_if_missing=True)
    # Store the supply point case id in user data fields so that the
    # phone can auto select it.
    supply_point = SupplyPointCase.get_by_location(selected_loc)
    user.user_data['commtrack-supply-point'] = supply_point._id
def test_import_with_existing_parent_by_site_code(self):
    """A parent referenced by site code should resolve to the existing
    state location."""
    payload = {
        'name': 'importedloc',
        'parent_site_code': self.test_state.site_code,
    }
    result = import_location(self.domain.name, 'district', payload)
    if result['id'] is None:
        self.fail('import failed with error: %s' % result['message'])
    self.assertTrue(payload['name'] in self.names_of_locs())
    imported = Location.get(result['id'])
    self.assertEqual(imported.parent_id, self.test_state._id)
def process_facility_statuses(facility_id, statuses, alerts=True):
    """
    For a given facility and list of statuses, update the appropriate
    data warehouse tables. This should only be called on supply points
    that are facilities.

    :param facility_id: location id of the facility being processed
    :param statuses: iterable of supply point status records to fold into
        the warehouse
    :param alerts: when True, also create "not submitted" / "not received"
        alerts for the relevant status types
    """
    facility = Location.get(facility_id)
    for status in statuses:
        # Each status is bucketed into the reporting window its date
        # falls in for that status type.
        warehouse_date = _get_window_date(status.status_type, status.status_date)
        if _is_valid_status(facility, status.status_date, status.status_type):
            org_summary = OrganizationSummary.objects.get_or_create(
                location_id=facility_id, date=warehouse_date)[0]
            group_summary = GroupSummary.objects.get_or_create(
                org_summary=org_summary, title=status.status_type)[0]
            group_summary.total = 1
            if status.status_value not in (
                    SupplyPointStatusValues.REMINDER_SENT,
                    SupplyPointStatusValues.ALERT_SENT):
                # we've responded to this query
                group_summary.responded = 1
                if status.status_value in [
                        SupplyPointStatusValues.SUBMITTED,
                        SupplyPointStatusValues.RECEIVED]:
                    group_summary.complete = 1
                else:
                    # Preserve a previously-recorded completion; default
                    # to 0 if never set.
                    group_summary.complete = group_summary.complete or 0
                if group_summary.complete:
                    if is_on_time(status.status_date, warehouse_date, status.status_type):
                        group_summary.on_time = 1
                    else:
                        # Deliberate no-op: a late status does not clear an
                        # earlier on-time flag for this window.
                        group_summary.on_time = group_summary.on_time
                else:
                    group_summary.on_time = 0
                group_summary.save()
                if alerts:
                    if status.status_value == SupplyPointStatusValues.NOT_SUBMITTED \
                            and status.status_type == SupplyPointStatusTypes.R_AND_R_FACILITY:
                        create_alert(facility_id, status.status_date,
                                     const.RR_NOT_SUBMITTED, {'number': 1})
                    if status.status_value == SupplyPointStatusValues.NOT_RECEIVED \
                            and status.status_type == SupplyPointStatusTypes.DELIVERY_FACILITY:
                        create_alert(facility_id, status.status_date,
                                     const.DELIVERY_NOT_RECEIVED, {'number': 1})