Code Example #1
File: test_dbaccessors.py  Project: ekush/commcare-hq
 def setUpClass(cls):
     cls.domain = 'supply-point-dbaccessors'
     cls.locations = [
         Location(domain=cls.domain),
         Location(domain=cls.domain),
         Location(domain=cls.domain),
     ]
     Location.get_db().bulk_save(cls.locations)
     cls.supply_points = [
         CommCareCase(domain=cls.domain,
                      type='supply-point',
                      location_id=cls.locations[0]._id),
         CommCareCase(domain=cls.domain,
                      type='supply-point',
                      location_id=cls.locations[1]._id),
         CommCareCase(domain=cls.domain,
                      type='supply-point',
                      location_id=cls.locations[2]._id),
     ]
     locations_by_id = {
         location._id: location
         for location in cls.locations
     }
     cls.location_supply_point_pairs = [
         (locations_by_id[supply_point.location_id], supply_point)
         for supply_point in cls.supply_points
     ]
     CommCareCase.get_db().bulk_save(cls.supply_points)
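
The fixture above bulk-saves three locations and three supply-point cases, so the test class typically also needs a matching cleanup. A minimal tearDownClass sketch, assuming the iter_bulk_delete helper shown in Code Examples #2 and #15:

 @classmethod
 def tearDownClass(cls):
     # Hypothetical cleanup mirroring setUpClass: bulk-delete the docs the
     # fixture created (iter_bulk_delete is the helper used in Code
     # Examples #2 and #15).
     iter_bulk_delete(Location.get_db(), [loc._id for loc in cls.locations])
     iter_bulk_delete(CommCareCase.get_db(),
                      [case._id for case in cls.supply_points])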
Code Example #2
File: util.py  Project: elbowink/commcare-hq
def purge_locations(domain):
    """
    Delete all location data associated with <domain>.

    This means Locations, SQLLocations, LocationTypes, and anything which
    has a ForeignKey relationship to SQLLocation (as of 2015-03-02, this
    includes only StockStates and some custom stuff).
    """
    location_ids = set([r['id'] for r in Location.get_db().view(
        'locations/by_type',
        reduce=False,
        startkey=[domain],
        endkey=[domain, {}],
    ).all()])
    iter_bulk_delete(Location.get_db(), location_ids)

    for loc in SQLLocation.objects.filter(domain=domain).iterator():
        if loc.supply_point_id:
            case = CommCareCase.get(loc.supply_point_id)
            case.delete()
        loc.delete()

    db = Domain.get_db()
    domain_obj = Domain.get_by_name(domain)  # cached lookup is fast but stale
    domain_json = db.get(domain_obj._id)  # get latest raw, unwrapped doc
    domain_json.pop('obsolete_location_types', None)
    domain_json.pop('location_types', None)
    db.save_doc(domain_json)
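
A hedged usage sketch for purge_locations; the domain name is illustrative, and the final check simply restates what the docstring above promises for SQLLocations:

# Hypothetical cleanup call, e.g. from a test tearDown or a maintenance shell.
purge_locations('example-test-domain')
assert not SQLLocation.objects.filter(domain='example-test-domain').exists()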
Code Example #3
    def handle(self, *args, **options):
        with open('location_results.csv', 'wb+') as csvfile:
            csv_writer = csv.writer(
                csvfile,
                delimiter=',',
                quotechar='|',
                quoting=csv.QUOTE_MINIMAL
            )

            csv_writer.writerow(['id', 'type', 'domain', 'property', 'value'])

            locations = list(set(Location.get_db().view(
                'locations/by_type',
                reduce=False,
                wrapper=lambda row: row['id'],
            ).all()))

            problematic_domains = {}

            for loc in iter_docs(Location.get_db(), locations):
                loc = Location.get(loc['_id'])
                if self.has_any_hardcoded_properties(loc, csv_writer):
                    if loc.domain in problematic_domains:
                        problematic_domains[loc.domain] += 1
                    else:
                        problematic_domains[loc.domain] = 1

            self.stdout.write("\nDomain stats:\n")
            for domain, count in problematic_domains.iteritems():
                self.stdout.write("%s: %d" % (domain, count))
Code Example #4
    def handle(self, *args, **options):
        with open('location_results.csv', 'wb+') as csvfile:
            csv_writer = csv.writer(csvfile,
                                    delimiter=',',
                                    quotechar='|',
                                    quoting=csv.QUOTE_MINIMAL)

            csv_writer.writerow(['id', 'type', 'domain', 'property', 'value'])

            locations = list(
                set(Location.get_db().view(
                    'locations/by_type',
                    reduce=False,
                    wrapper=lambda row: row['id'],
                ).all()))

            problematic_domains = {}

            for loc in iter_docs(Location.get_db(), locations):
                loc = Location.get(loc['_id'])
                if self.has_any_hardcoded_properties(loc, csv_writer):
                    if loc.domain in problematic_domains:
                        problematic_domains[loc.domain] += 1
                    else:
                        problematic_domains[loc.domain] = 1

            self.stdout.write("\nDomain stats:\n")
            for domain, count in problematic_domains.iteritems():
                self.stdout.write("%s: %d" % (domain, count))
Code Example #5
    def handle(self, domain_name, **options):
        domain = Domain.get_by_name(domain_name)
        if not domain:
            print(u'domain with name "{}" not found'.format(domain_name))
            return

        sql_location_qs = SQLLocation.objects.filter(domain=domain_name)
        print('checking {} locations for issues'.format(sql_location_qs.count()))
        couch_locations_to_save = []
        for sql_location in sql_location_qs:
            if sql_location.lineage != sql_location.couch_location.lineage:
                print('would change lineage of {} from {} to {}'.format(
                    sql_location.name,
                    '-->'.join(sql_location.couch_location.lineage),
                    '-->'.join(sql_location.lineage),
                ))
                sql_location.couch_location.lineage = sql_location.lineage
                couch_locations_to_save.append(sql_location.couch_location.to_json())

        if couch_locations_to_save:
            if not options['noinput']:
                confirm = raw_input(
                    u"""
                    Would you like to commit these changes? {} locations will be affected. (y/n)
                    """.format(len(couch_locations_to_save))
                )
                if confirm != 'y':
                    print("\n\t\taborted")
                    return
            print(u"Committing changes")
            Location.get_db().bulk_save(couch_locations_to_save)
            print("Operation completed")
        else:
            print('no issues found')
Code Example #6
File: updater.py  Project: philipkaare/commcare-hq
def get_non_archived_facilities_below(location):
    child_ids = (
        location.sql_location.get_descendants(include_self=True)
        .filter(is_archived=False, location_type__name="FACILITY")
        .values_list("location_id", flat=True)
    )
    return [Location.wrap(doc) for doc in get_docs(Location.get_db(), child_ids)]
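
A short, hypothetical call site for the helper above; the location id is illustrative, and Location.get is the accessor used in Code Examples #7 and #24:

# List the non-archived FACILITY descendants of a parent location fetched by id.
parent = Location.get('example-location-id')  # illustrative id
for facility in get_non_archived_facilities_below(parent):
    print('%s (%s)' % (facility.name, facility.site_code))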
Code Example #7
    def handle(self, *args, **options):
        try:
            domain = args[0]
        except IndexError:
            self.stderr.write('domain required\n')
            return

        self.println('Migrating...')

        for loc_id, case in get_supply_points_json_in_domain_by_location(domain):
            loc = Location.get(loc_id)

            old_code = case.get('site_code', '')
            new_code = getattr(loc, 'site_code', '')

            if old_code and not new_code:
                loc.site_code = old_code
                loc.save()
                self.println('migrated %s (%s)' % (loc.name, loc.site_code))

        self.println('Verifying code uniqueness...')

        all_codes = Location.get_db().view('commtrack/locations_by_code',
                                           startkey=[domain], endkey=[domain, {}])
        locs_by_code = map_reduce(lambda e: [(e['key'][-1].lower(), e['id'])], data=all_codes)
        for code, loc_ids in locs_by_code.iteritems():
            if len(loc_ids) == 1:
                continue

            self.println('duplicate code [%s]' % code)
            locs = Location.view('_all_docs', keys=loc_ids, include_docs=True)
            for loc in locs:
                self.println('  %s [%s]' % (loc.name, loc._id))
Code Example #8
def get_wrapped_owner(owner_id):
    """
    Returns the wrapped user or group object for a given ID, or None
    if the id isn't a known owner type.
    """
    if not owner_id:
        return None

    def _get_class(doc_type):
        return {
            'CommCareUser': CommCareUser,
            'WebUser': WebUser,
            'Group': Group,
            'Location': Location,
        }.get(doc_type)

    for db in [user_db(), Location.get_db()]:
        try:
            owner_doc = db.get(owner_id)
        except ResourceNotFound:
            continue
        else:
            cls = _get_class(owner_doc['doc_type'])
            return cls.wrap(owner_doc) if cls else None
    return None
Code Example #9
File: cases.py  Project: ansarbek/commcare-hq
def get_wrapped_owner(owner_id):
    """
    Returns the wrapped user or group object for a given ID, or None
    if the id isn't a known owner type.
    """
    if not owner_id:
        return None

    def _get_class(doc_type):
        return {
            'CommCareUser': CommCareUser,
            'WebUser': WebUser,
            'Group': Group,
            'Location': Location,
        }.get(doc_type)

    for db in [user_db(), Location.get_db()]:
        try:
            owner_doc = db.get(owner_id)
        except ResourceNotFound:
            continue
        else:
            cls = _get_class(owner_doc['doc_type'])
            return cls.wrap(owner_doc) if cls else None
    return None
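
A minimal call sketch for the get_wrapped_owner function shown in Code Examples #8 and #9; the owner id is hypothetical, and the attributes used belong to the classes listed in the doc_type map above:

# Hypothetical lookup: resolve an owner id to its wrapped document, if any.
owner = get_wrapped_owner('0123456789abcdef')  # illustrative id
if owner is None:
    print('not a known owner type')
elif isinstance(owner, (CommCareUser, WebUser)):
    print('owned by user %s' % owner.username)
else:
    print('owned by %s %s' % (owner.doc_type, owner._id))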
Code Example #10
def iter_location_join_supply_point(all_location_ids, chunksize=100):

    # this function was copy-paste-modified from iter_docs

    database = Location.get_db()
    for location_ids in chunked(all_location_ids, chunksize):
        # sync supply point id
        locations = [row.get('doc')
                     for row in get_docs(database, keys=location_ids)
                     if row.get('doc')
                     and row.get('doc')['domain'] not in EXCLUDE_DOMAINS]

        supply_points = SupplyPointCase.view(
            'commtrack/supply_point_by_loc',
            keys=[[location['domain'], location['_id']]
                  for location in locations],
            include_docs=True,
            classes={'CommCareCase': SupplyPointCase},
        ).all()

        supply_points_index = {}

        for supply_point in supply_points:
            key = (supply_point.domain, supply_point.location_id)
            if key in supply_points_index:
                raise Exception(
                    "Multiple supply points have "
                    "domain={!r}, location_id={!r}".format(*key))
            supply_points_index[key] = supply_point

        for location in locations:
            yield (
                location,
                supply_points_index.get((location['domain'], location['_id']))
            )
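
A hedged driver for the generator above; it reuses SQLLocation.objects.location_ids(), the accessor that appears in Code Example #15, and reads the same document fields the generator itself uses:

# Hypothetical report: flag locations that have no linked supply point.
all_ids = SQLLocation.objects.location_ids()
for location, supply_point in iter_location_join_supply_point(all_ids):
    if supply_point is None:
        print('%s (%s) has no supply point' % (location['name'], location['_id']))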
Code Example #11
    def bulk_delete_locs(self, loc_ids, total):
        locs_to_save = []
        count = 0
        for loc in iter_docs(Location.get_db(), loc_ids):
            loc['doc_type'] = "{}{}".format(loc['doc_type'], DELETED_SUFFIX)
            loc['is_archived'] = True
            locs_to_save.append(loc)
            count += 1

            if len(locs_to_save) > 100:
                Location.get_db().bulk_save(locs_to_save)
                locs_to_save = []
                print "{} of {}".format(count, total)
                time.sleep(5)

        if locs_to_save:
            Location.get_db().bulk_save(locs_to_save)
Code Example #12
    def bulk_delete_locs(self, loc_ids, total):
        locs_to_save = []
        count = 0
        for loc in iter_docs(Location.get_db(), loc_ids):
            loc['doc_type'] = "{}{}".format(loc['doc_type'], DELETED_SUFFIX)
            loc['is_archived'] = True
            locs_to_save.append(loc)
            count += 1

            if len(locs_to_save) > 100:
                Location.get_db().bulk_save(locs_to_save)
                locs_to_save = []
                print "{} of {}".format(count, total)
                time.sleep(5)

        if locs_to_save:
            Location.get_db().bulk_save(locs_to_save)
Code Example #13
def get_non_archived_facilities_below(location):
    child_ids = location.sql_location.get_descendants(
        include_self=True).filter(is_archived=False,
                                  location_type__name='FACILITY').values_list(
                                      'location_id', flat=True)
    return [
        Location.wrap(doc) for doc in get_docs(Location.get_db(), child_ids)
    ]
Code Example #14
    def get_data(self):
        # todo: this will probably have to paginate eventually
        if self.all_relevant_forms:
            sp_ids = get_relevant_supply_point_ids(
                self.domain,
                self.active_location,
            )

            form_xmlnses = [form['xmlns'] for form in self.all_relevant_forms.values()]
            spoint_loc_map = {
                doc['_id']: doc['location_id']
                for doc in iter_docs(SupplyPointCase.get_db(), sp_ids)
            }
            locations = {
                doc['_id']: Location.wrap(doc)
                for doc in iter_docs(Location.get_db(), spoint_loc_map.values())
            }

            for spoint_id, loc_id in spoint_loc_map.items():
                loc = locations[loc_id]

                form_ids = StockReport.objects.filter(
                    stocktransaction__case_id=spoint_id
                ).exclude(
                    date__lte=self.start_date
                ).exclude(
                    date__gte=self.end_date
                ).values_list(
                    'form_id', flat=True
                ).order_by('-date').distinct()  # not truly distinct due to ordering
                matched = False
                for form_id in form_ids:
                    try:
                        if XFormInstance.get(form_id).xmlns in form_xmlnses:
                            yield {
                                'loc_id': loc._id,
                                'loc_path': loc.path,
                                'name': loc.name,
                                'type': loc.location_type,
                                'reporting_status': 'reporting',
                                'geo': loc._geopoint,
                            }
                            matched = True
                            break
                    except ResourceNotFound:
                        logging.error('Stock report for location {} in {} references non-existent form {}'.format(
                            loc._id, loc.domain, form_id
                        ))
                if not matched:
                    yield {
                        'loc_id': loc._id,
                        'loc_path': loc.path,
                        'name': loc.name,
                        'type': loc.location_type,
                        'reporting_status': 'nonreporting',
                        'geo': loc._geopoint,
                    }
Code Example #15
File: util.py  Project: ansarbek/commcare-hq
def delete_all_locations():
    ids = [
        doc['id'] for doc in
        SupplyPointCase.get_db().view('supply_point_by_loc/view', reduce=False).all()
    ]
    iter_bulk_delete(SupplyPointCase.get_db(), ids)

    iter_bulk_delete(Location.get_db(), SQLLocation.objects.location_ids())

    SQLLocation.objects.all().delete()
Code Example #16
File: views.py  Project: thedevelopermw/commcare-hq
def location_dump(request, domain):
    loc_ids = [row['id'] for row in Location.view('commtrack/locations_by_code', startkey=[domain], endkey=[domain, {}])]
    
    resp = HttpResponse(content_type='text/csv')
    resp['Content-Disposition'] = 'attachment; filename="locations_%s.csv"' % domain

    w = csv.writer(resp)
    w.writerow(['UUID', 'Location Type', 'SMS Code'])
    for raw in iter_docs(Location.get_db(), loc_ids):
        loc = Location.wrap(raw)
        w.writerow([loc._id, loc.location_type, loc.site_code])
    return resp
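
A hedged smoke-test sketch for the view above using Django's RequestFactory; the URL path and domain are illustrative, and it assumes the view can be called directly, without any auth decorators the project may apply:

# Call the CSV dump view directly and check the response type.
from django.test import RequestFactory
request = RequestFactory().get('/a/example-domain/location_dump/')  # illustrative path
response = location_dump(request, 'example-domain')
assert response['Content-Type'].startswith('text/csv')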
Code Example #17
File: views.py  Project: dszafranek/commcare-hq
def location_dump(request, domain):
    loc_ids = [row['id'] for row in Location.view('commtrack/locations_by_code', startkey=[domain], endkey=[domain, {}])]
    
    resp = HttpResponse(content_type='text/csv')
    resp['Content-Disposition'] = 'attachment; filename="locations_%s.csv"' % domain

    w = csv.writer(resp)
    w.writerow(['UUID', 'Location Type', 'SMS Code'])
    for raw in iter_docs(Location.get_db(), loc_ids):
        loc = Location.wrap(raw)
        w.writerow([loc._id, loc.location_type, loc.site_code])
    return resp
Code Example #18
    def handle(self, *args, **options):
        relevant_ids = set([r['id'] for r in Location.get_db().view(
            'locations/by_type',
            reduce=False,
        ).all()])

        total_locs = len(relevant_ids)
        print 'processing {} locations'.format(total_locs)
        total_saved = 0
        queue = []
        for i, loc in enumerate(iter_docs(Location.get_db(), relevant_ids)):
            if loc['domain'] in PSI_DOMAINS:
                loc['doc_type'] = 'Location-Deleted'
                queue.append(loc)

            if len(queue) > 500:
                Location.get_db().bulk_save(queue)
                total_saved += len(queue)
                queue = []
                print 'saved {} locations ({}/{} seen)'.format(total_saved, i, total_locs)
                time.sleep(5)

        if queue:
            total_saved += len(queue)
            Location.get_db().bulk_save(queue)

        print 'successfully archived {}/{} locations'.format(total_saved, total_locs)
Code Example #19
    def handle(self, *args, **options):
        self.stdout.write("Processing locations...\n")

        relevant_ids = set([r['id'] for r in Location.get_db().view(
            'commtrack/locations_by_code',
            reduce=False,
        ).all()])

        to_save = []

        for location in iter_docs(Location.get_db(), relevant_ids):
            # exclude any psi domain to make this take a realistic
            # amount of time
            if (
                not location.get('last_modified', False) and
                'psi' not in location.get('domain', '')
            ):
                location['last_modified'] = datetime.now().isoformat()
                to_save.append(location)

                if len(to_save) > 500:
                    Location.get_db().bulk_save(to_save)
                    to_save = []

        if to_save:
            Location.get_db().bulk_save(to_save)
Code Example #20
 def setUpClass(cls):
     cls.domain = 'supply-point-dbaccessors'
     cls.locations = [
         Location(domain=cls.domain),
         Location(domain=cls.domain),
         Location(domain=cls.domain),
     ]
     Location.get_db().bulk_save(cls.locations)
     cls.supply_points = [
         CommCareCase(domain=cls.domain, type='supply-point',
                      location_id=cls.locations[0]._id),
         CommCareCase(domain=cls.domain, type='supply-point',
                      location_id=cls.locations[1]._id),
         CommCareCase(domain=cls.domain, type='supply-point',
                      location_id=cls.locations[2]._id),
     ]
     locations_by_id = {location._id: location
                        for location in cls.locations}
     cls.location_supply_point_pairs = [
         (locations_by_id[supply_point.location_id], supply_point)
         for supply_point in cls.supply_points
     ]
     CommCareCase.get_db().bulk_save(cls.supply_points)
Code Example #21
 def setUp(self):
     # we have to set the fake database before any other calls
     self.domain = "test-loc-parent-id"
     self.evaluation_context = EvaluationContext({"domain": self.domain})
     self.orig_db = Location.get_db()
     self.database = FakeCouchDb()
     Location.set_db(self.database)
     self.parent = self._make_location(_id=uuid.uuid4().hex)
     self.child = self._make_location(_id=uuid.uuid4().hex, lineage=[self.parent._id])
     self.grandchild = self._make_location(_id=uuid.uuid4().hex, lineage=[self.child._id, self.parent._id])
     self.expression_spec = {
         "type": "location_parent_id",
         "location_id_expression": {"type": "property_name", "property_name": "location_id"},
     }
     self.expression = ExpressionFactory.from_spec(self.expression_spec)
Code Example #22
def lookup_by_property(domain, prop_name, val, scope, root=None):
    if root and not isinstance(root, basestring):
        root = root._id

    index_view = "locations/prop_index_%s" % prop_name

    startkey = [domain, val]
    if scope == "global":
        startkey.append(None)
    elif scope == "descendant":
        startkey.append(root)
    elif scope == "child":
        startkey.extend([root, 1])
    else:
        raise ValueError("invalid scope type")

    return set(row["id"] for row in Location.get_db().view(index_view, startkey=startkey, endkey=startkey + [{}]))
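
A hedged example call for lookup_by_property; the domain and value are illustrative, and site_code is used because Code Examples #26 and #27 show it is the one property guaranteed to have a prop_index view:

# Hypothetical lookup: location ids in example-domain whose site code is
# "dist01", searched across the whole domain ("global" scope).
loc_ids = lookup_by_property('example-domain', 'site_code', 'dist01', 'global')
for doc in iter_docs(Location.get_db(), loc_ids):
    print(doc['name'])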
Code Example #23
File: util.py  Project: nikhilverma24/commcare-hq
def lookup_by_property(domain, prop_name, val, scope, root=None):
    if root and not isinstance(root, basestring):
        root = root._id

    index_view = 'locations/prop_index_%s' % prop_name

    startkey = [domain, val]
    if scope == 'global':
        startkey.append(None)
    elif scope == 'descendant':
        startkey.append(root)
    elif scope == 'child':
        startkey.extend([root, 1])
    else:
        raise ValueError('invalid scope type')

    return set(row['id'] for row in Location.get_db().view(index_view, startkey=startkey, endkey=startkey + [{}]))
Code Example #24
    def handle(self, *args, **options):
        try:
            domain = args[0]
        except IndexError:
            self.stderr.write('domain required\n')
            return

        self.println('Migrating...')

        supply_point_cases = CommCareCase.get_db().view(
            'commtrack/supply_point_by_loc',
            startkey=[domain],
            endkey=[domain, {}],
            include_docs=True
        )

        for result in supply_point_cases:
            loc_id = result['key'][-1]
            loc = Location.get(loc_id)
            case = result['doc']

            old_code = case.get('site_code', '')
            new_code = getattr(loc, 'site_code', '')

            if old_code and not new_code:
                loc.site_code = old_code
                loc.save()
                self.println('migrated %s (%s)' % (loc.name, loc.site_code))

        self.println('Verifying code uniqueness...')

        all_codes = Location.get_db().view('commtrack/locations_by_code',
                                           startkey=[domain], endkey=[domain, {}])
        locs_by_code = map_reduce(lambda e: [(e['key'][-1].lower(), e['id'])], data=all_codes)
        for code, loc_ids in locs_by_code.iteritems():
            if len(loc_ids) == 1:
                continue

            self.println('duplicate code [%s]' % code)
            locs = Location.view('_all_docs', keys=loc_ids, include_docs=True)
            for loc in locs:
                self.println('  %s [%s]' % (loc.name, loc._id))
Code Example #25
 def setUp(self):
     # we have to set the fake database before any other calls
     self.domain = 'test-loc-parent-id'
     self.evaluation_context = EvaluationContext({"domain": self.domain})
     self.orig_db = Location.get_db()
     self.database = FakeCouchDb()
     Location.set_db(self.database)
     self.parent = self._make_location(_id=uuid.uuid4().hex)
     self.child = self._make_location(_id=uuid.uuid4().hex,
                                      lineage=[self.parent._id])
     self.grandchild = self._make_location(
         _id=uuid.uuid4().hex, lineage=[self.child._id, self.parent._id])
     self.expression_spec = {
         "type": "location_parent_id",
         "location_id_expression": {
             "type": "property_name",
             "property_name": "location_id",
         }
     }
     self.expression = ExpressionFactory.from_spec(self.expression_spec)
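
A heavily hedged test-method sketch for the fixture above; it assumes _make_location persists to the fake db and that expressions built by ExpressionFactory are invoked as expression(doc, evaluation_context), which is how the pieces of this setUp appear intended to fit together:

 def test_child_resolves_to_parent(self):
     # Hypothetical assertion: the location_parent_id expression should map
     # the child's location_id to the parent's _id via its lineage.
     doc = {"domain": self.domain, "location_id": self.child._id}
     self.assertEqual(self.expression(doc, self.evaluation_context),
                      self.parent._id)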
Code Example #26
File: util.py  Project: jmaina/commcare-hq
def lookup_by_property(domain, prop_name, val, scope, root=None):
    if root and not isinstance(root, basestring):
        root = root._id

    if prop_name == 'site_code':
        index_view = 'locations/prop_index_site_code'
    else:
        # this was to be backwards compatible with the api
        # if this ever comes up, please take a moment to decide whether it's
        # worth changing the API to raise a less nonsensical error
        # (or change this function to not sound so general!)
        raise ResourceNotFound('missing prop_index_%s' % prop_name)

    startkey = [domain, val]
    if scope == 'global':
        startkey.append(None)
    elif scope == 'descendant':
        startkey.append(root)
    elif scope == 'child':
        startkey.extend([root, 1])
    else:
        raise ValueError('invalid scope type')

    return set(row['id'] for row in Location.get_db().view(index_view, startkey=startkey, endkey=startkey + [{}]))
Code Example #27
File: util.py  Project: amonkeykong81/commcare-hq
def lookup_by_property(domain, prop_name, val, scope, root=None):
    if root and not isinstance(root, basestring):
        root = root._id

    if prop_name == 'site_code':
        index_view = 'locations/prop_index_site_code'
    else:
        # this was to be backwards compatible with the api
        # if this ever comes up, please take a moment to decide whether it's
        # worth changing the API to raise a less nonsensical error
        # (or change this function to not sound so general!)
        raise ResourceNotFound('missing prop_index_%s' % prop_name)

    startkey = [domain, val]
    if scope == 'global':
        startkey.append(None)
    elif scope == 'descendant':
        startkey.append(root)
    elif scope == 'child':
        startkey.extend([root, 1])
    else:
        raise ValueError('invalid scope type')

    return set(row['id'] for row in Location.get_db().view(index_view, startkey=startkey, endkey=startkey + [{}]))
Code Example #28
    def handle(self, *args, **options):
        self.stdout.write("Populating site codes...\n")

        site_codes_by_domain = {}

        relevant_ids = set([r['id'] for r in Location.get_db().view(
            'locations/by_type',
            reduce=False,
        ).all()])

        to_save = []

        for loc in iter_docs(Location.get_db(), relevant_ids):
            if not loc['site_code']:
                # triggering the save will cause this to get populated
                self.stdout.write("Updating location %s\n" % loc['name'])

                if loc['domain'] not in site_codes_by_domain:
                    site_codes_by_domain[loc['domain']] = list(
                        Location.site_codes_for_domain(loc['domain'])
                    )

                loc['site_code'] = generate_site_code(
                    loc['name'],
                    site_codes_by_domain[loc['domain']]
                )
                site_codes_by_domain[loc['domain']].append(loc['site_code'])

                to_save.append(loc)

                if len(to_save) > 500:
                    Location.get_db().bulk_save(to_save)
                    to_save = []

        if to_save:
            Location.get_db().bulk_save(to_save)
Code Example #29
 def tearDown(self):
     self.user.delete()
     SQLLocation.objects.all().delete()
     delete_all_docs_by_doc_type(Location.get_db(), ['Location'])
     super(LocationsTest, self).tearDown()
Code Example #30
File: models.py  Project: amonkeykong81/commcare-hq
 def _gen():
     location_ids = [sp.location_id for sp in self.get_linked_supply_points()]
     for doc in iter_docs(Location.get_db(), location_ids):
         yield Location.wrap(doc)
Code Example #31
    def forwards(self, orm):
        # hack: manually force sync Location design docs before
        # we try to load from them
        sync_docs.sync(location_models, verbosity=2)

        properties_to_sync = [
            ('location_id', '_id'),
            'domain',
            'name',
            'location_type',
            'site_code',
            'external_id',
            'latitude',
            'longitude',
            'is_archived',
        ]

        location_ids = set([r['id'] for r in Location.get_db().view(
            'locations/by_name',
            reduce=False,
        ).all()])

        for location, sp in iter_location_join_supply_point(location_ids):
            try:
                sql_location = orm.SQLLocation.objects.get(location_id=location['_id'])
            except orm.SQLLocation.DoesNotExist:
                # this populates bogus mptt data because otherwise
                # null constraints will blow up but do not worry, we
                # rebuild this at the end
                sql_location = orm.SQLLocation.objects.create(
                    location_id=location['_id'],
                    lft=0,
                    rght=0,
                    tree_id=0,
                    level=0
                )

            for prop in properties_to_sync:
                if isinstance(prop, tuple):
                    sql_prop, couch_prop = prop
                else:
                    sql_prop = couch_prop = prop

                if couch_prop in location:
                    setattr(sql_location, sql_prop, location[couch_prop])

            if sp:
                sql_location.supply_point_id = sp._id

            # sync parent connection
            lineage = location.get('lineage', None)
            if lineage:
                try:
                    sql_location.parent = orm.SQLLocation.objects.get(location_id=lineage[0])
                except orm.SQLLocation.DoesNotExist:
                    # create a placeholder for the parent if it does
                    # not yet exist, assuming that it will be properly
                    # populated with data when its turn comes up in the
                    # loop
                    sql_location.parent = orm.SQLLocation.objects.create(
                        location_id=lineage[0],
                        lft=0,
                        rght=0,
                        tree_id=0,
                        level=0
                    )
                    sql_location.parent.save()

            sql_location.save()

        # this is the important bit that rebuilds mptt tree structures
        SQLLocation.objects.rebuild()