def test_update(self):
    """PUT to the single-item endpoint replaces fields and item_attributes."""
    data_item = self._create_data_item()

    payload = {
        "data_type_id": self.data_type._id,
        "fields": {
            "state_name": {
                "field_list": [
                    {"field_value": "Massachusetts", "properties": {"lang": "en"}},
                    {"field_value": "马萨诸塞", "properties": {"lang": "zh"}},
                ]
            }
        },
        "item_attributes": {
            "attribute1": "cool_attr_value",
        }
    }

    response = self._assert_auth_post_resource(
        self.single_endpoint(data_item._id), json.dumps(payload), method="PUT")

    # Re-fetch so assertions run against the persisted document, not the
    # stale local copy.
    data_item = FixtureDataItem.get(data_item._id)
    self.assertEqual(response.status_code, 204)
    self.assertEqual(data_item.data_type_id, self.data_type._id)
    self.assertEqual(len(data_item.fields), 1)
    first_entry = data_item.fields['state_name'].field_list[0]
    self.assertEqual(first_entry.field_value, 'Massachusetts')
    self.assertEqual(first_entry.properties, {"lang": "en"})
    self.assertEqual(data_item.item_attributes, {"attribute1": "cool_attr_value"})
Beispiel #2
0
    def obj_update(self, bundle, **kwargs):
        """Partially update a lookup-table item's fields / item_attributes.

        Requires 'data_type_id' in the payload; only saves when at least
        one updatable attribute was supplied.
        """
        if 'data_type_id' not in bundle.data:
            raise BadRequest("data_type_id must be specified")

        try:
            bundle.obj = FixtureDataItem.get(kwargs['pk'])
        except ResourceNotFound:
            raise NotFound('Lookup table item not found')

        # Never expose or modify items that belong to another domain.
        if bundle.obj.domain != kwargs['domain']:
            raise NotFound('Lookup table item not found')

        dirty = False
        if 'fields' in bundle.data:
            wrapped = {}
            for name, field_list in bundle.data['fields'].items():
                wrapped[name] = FieldList.wrap(field_list)
            bundle.obj.fields = wrapped
            dirty = True

        if 'item_attributes' in bundle.data:
            bundle.obj.item_attributes = bundle.data['item_attributes']
            dirty = True

        if dirty:
            bundle.obj.save()

        return bundle
Beispiel #3
0
def _process_data_item(domain, replace, data_type, di, item_fields,
                       item_attributes, sort_key):
    """Processes a FixtureDataItem from its excel upload.

    Returns a tuple with
      - (unsaved) FixtureDataItem instance
      - boolean flag to indicate if the data item was deleted
      - a list of errors
    """
    delete = False
    errors = []
    # Candidate item built from the uploaded row; used whenever no valid
    # existing item can be located via the row's UID.
    new_data_item = FixtureDataItem(domain=domain,
                                    data_type_id=data_type.get_id,
                                    fields=item_fields,
                                    item_attributes=item_attributes,
                                    sort_key=sort_key)
    try:
        if di['UID'] and not replace:
            # Row references an existing item and we are not replacing the
            # whole table: fetch it so the upload updates it in place.
            old_data_item = FixtureDataItem.get(di['UID'])
        else:
            old_data_item = new_data_item
        old_data_item.fields = item_fields
        old_data_item.item_attributes = item_attributes
        # A UID pointing at a different domain or table is treated as
        # invalid: fall back to the fresh item and report the problem.
        if (old_data_item.domain != domain
                or not old_data_item.data_type_id == data_type.get_id):
            old_data_item = new_data_item
            errors.append(
                _("'%(UID)s' is not a valid UID. But the new item is created.")
                % {'UID': di['UID']})
        if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
            delete = True
    except (ResourceNotFound, KeyError):
        # Unknown UID or missing column: treat the row as a brand-new item.
        old_data_item = new_data_item

    return old_data_item, delete, errors
Beispiel #4
0
    def generate_lineage(self, leaf_type, leaf_item_id):
        """Build the chain of fixture items from the hierarchy root down to
        the given leaf item (leaf last)."""
        leaf_fdi = FixtureDataItem.get(leaf_item_id)

        # Locate the leaf type's position scanning bottom-up; the last
        # match wins, mirroring a full scan without early exit.
        index = None
        for position, level in enumerate(self.hierarchy[::-1]):
            if level["type"] == leaf_type:
                index = position

        if index is None:
            raise Exception(
                "Could not generate lineage for AsyncDrillableFilter due to a nonexistent leaf_type (%s)"
                % leaf_type)

        lineage = [leaf_fdi]
        for position, level in enumerate(self.full_hierarchy[::-1]):
            if position < index or position >= len(self.hierarchy) - 1:
                continue
            real_index = len(self.hierarchy) - (position + 1)
            ref_value = lineage[0].fields_without_attributes[level["parent_ref"]]
            parent = FixtureDataItem.by_field_value(
                self.domain,
                self.data_types(real_index - 1),
                level["references"],
                ref_value,
            ).one()
            lineage.insert(0, parent)

        return lineage
Beispiel #5
0
    def generate_lineage(self, leaf_type, leaf_item_id):
        """Return the fixture items from hierarchy root to the leaf item."""
        leaf_fdi = FixtureDataItem.get(leaf_item_id)

        # Bottom-up scan; keep the deepest (last) matching level.
        index = None
        for depth, level in enumerate(self.hierarchy[::-1]):
            if level["type"] == leaf_type:
                index = depth

        if index is None:
            raise Exception(
                "Could not generate lineage for AsyncDrillableFilter due to a nonexistent leaf_type (%s)"
                % leaf_type)

        lineage = [leaf_fdi]
        for depth, level in enumerate(self.full_hierarchy[::-1]):
            if depth < index or depth >= len(self.hierarchy) - 1:
                continue
            real_index = len(self.hierarchy) - (depth + 1)
            parent_value = lineage[0].fields_without_attributes[level["parent_ref"]]
            lineage.insert(0, FixtureDataItem.by_field_value(
                self.domain, self.data_types(real_index - 1),
                level["references"], parent_value).one())

        return lineage
Beispiel #6
0
    def obj_update(self, bundle, **kwargs):
        """Update fields and/or item_attributes of an existing item.

        'data_type_id' is required in the request body; the item must
        belong to the requesting domain.
        """
        data = bundle.data
        if 'data_type_id' not in data:
            raise BadRequest("data_type_id must be specified")

        try:
            bundle.obj = FixtureDataItem.get(kwargs['pk'])
        except ResourceNotFound:
            raise NotFound('Lookup table item not found')

        # Guard against cross-domain access.
        if bundle.obj.domain != kwargs['domain']:
            raise NotFound('Lookup table item not found')

        touched = []
        if 'fields' in data:
            bundle.obj.fields = {
                name: FieldList.wrap(value)
                for name, value in data['fields'].items()
            }
            touched.append('fields')

        if 'item_attributes' in data:
            bundle.obj.item_attributes = data['item_attributes']
            touched.append('item_attributes')

        # Persist only when something actually changed.
        if touched:
            bundle.obj.save()

        return bundle
Beispiel #7
0
def data_items(request, domain, data_type_id, data_item_id):
    """CRUD view for lookup-table items, dispatched on HTTP method."""

    def prepare_item(item):
        # Serialize an item, optionally embedding related groups/users when
        # requested via ?groups=true / ?users=true.
        serialized = strip_json(item, disallow=['data_type_id'])
        if request.GET.get('groups') == 'true':
            serialized['groups'] = [strip_json(g) for g in item.get_groups()]
        if request.GET.get('users') == 'true':
            serialized['users'] = [prepare_user(u) for u in item.get_users()]
        return serialized

    method = request.method
    if method == 'POST' and data_item_id is None:
        created = FixtureDataItem(domain=domain, data_type_id=data_type_id,
                                  **_to_kwargs(request))
        created.save()
        return json_response(strip_json(created, disallow=['data_type_id']))

    if method == 'GET' and data_item_id is None:
        items = sorted(FixtureDataItem.by_data_type(domain, data_type_id),
                       key=lambda item: item.sort_key)
        return json_response([prepare_item(item) for item in items])

    if method == 'GET' and data_item_id:
        try:
            item = FixtureDataItem.get(data_item_id)
        except ResourceNotFound:
            raise Http404()
        assert(item.domain == domain and item.data_type.get_id == data_type_id)
        return json_response(prepare_item(item))

    if method == 'PUT' and data_item_id:
        original = FixtureDataItem.get(data_item_id)
        incoming = FixtureDataItem(domain=domain, **_to_kwargs(request))
        # Only the 'fields' attribute may be replaced via PUT.
        original.fields = incoming.fields
        original.save()
        return json_response(strip_json(original, disallow=['data_type_id']))

    if method == 'DELETE' and data_item_id:
        item = FixtureDataItem.get(data_item_id)
        assert(item.domain == domain and item.data_type.get_id == data_type_id)
        with CouchTransaction() as transaction:
            item.recursive_delete(transaction)
        return json_response({})

    return HttpResponseBadRequest()
Beispiel #8
0
 def obj_delete(self, bundle, **kwargs):
     """Delete a lookup-table item and everything that hangs off it."""
     try:
         data_item = FixtureDataItem.get(kwargs['pk'])
     except ResourceNotFound:
         raise NotFound('Lookup table item not found')
     # recursive_delete batches the item plus its dependents into one
     # transaction so the deletion is applied atomically.
     with CouchTransaction() as transaction:
         data_item.recursive_delete(transaction)
     return ImmediateHttpResponse(response=HttpAccepted())
Beispiel #9
0
 def obj_delete(self, bundle, **kwargs):
     """Remove an item (and its dependents) from its lookup table.

     Returns 202 Accepted immediately; 404 when the pk is unknown.
     """
     try:
         data_item = FixtureDataItem.get(kwargs['pk'])
     except ResourceNotFound:
         raise NotFound('Lookup table item not found')
     with CouchTransaction() as transaction:
         data_item.recursive_delete(transaction)
     return ImmediateHttpResponse(response=HttpAccepted())
Beispiel #10
0
def data_items(request, domain, data_type_id, data_item_id):
    """CRUD view for lookup-table items, dispatched on HTTP method.

    POST   (no item id) -> create a new item in the given data type
    GET    (no item id) -> list items, sorted by sort_key
    GET    (item id)    -> fetch one item, 404 when missing
    PUT    (item id)    -> replace only the 'fields' attribute
    DELETE (item id)    -> recursively delete item and dependents
    Anything else       -> 400
    """

    def prepare_item(item):
        # Serialize an item, optionally embedding related groups/users
        # when requested via ?groups=true / ?users=true.
        ret = strip_json(item, disallow=['data_type_id'])
        if request.GET.get('groups') == 'true':
            ret['groups'] = []
            for group in item.get_groups():
                ret['groups'].append(strip_json(group))
        if request.GET.get('users') == 'true':
            ret['users'] = []
            for user in item.get_users():
                ret['users'].append(prepare_user(user))
        return ret

    if request.method == 'POST' and data_item_id is None:
        o = FixtureDataItem(domain=domain, data_type_id=data_type_id, **_to_kwargs(request))
        o.save()
        return json_response(strip_json(o, disallow=['data_type_id']))
    elif request.method == 'GET' and data_item_id is None:
        return json_response([
            prepare_item(x)
            for x in sorted(FixtureDataItem.by_data_type(domain, data_type_id),
                            key=lambda x: x.sort_key)
        ])
    elif request.method == 'GET' and data_item_id:
        try:
            o = FixtureDataItem.get(data_item_id)
        except ResourceNotFound:
            raise Http404()
        # NOTE(review): assert is stripped under `python -O`; confirm this
        # ownership check does not need to be an explicit 404.
        assert(o.domain == domain and o.data_type.get_id == data_type_id)
        return json_response(prepare_item(o))
    elif request.method == 'PUT' and data_item_id:
        original = FixtureDataItem.get(data_item_id)
        new = FixtureDataItem(domain=domain, **_to_kwargs(request))
        # Only 'fields' is copied over; other attributes are ignored.
        for attr in 'fields',:
            setattr(original, attr, getattr(new, attr))
        original.save()
        return json_response(strip_json(original, disallow=['data_type_id']))
    elif request.method == 'DELETE' and data_item_id:
        o = FixtureDataItem.get(data_item_id)
        assert(o.domain == domain and o.data_type.get_id == data_type_id)
        with CouchTransaction() as transaction:
            o.recursive_delete(transaction)
        return json_response({})
    else:
        return HttpResponseBadRequest()
    def report_subtitles(self):
        """Assemble the subtitle lines describing the active report filters."""
        if self.needs_filters:
            return []

        parts = ["Date range: %s" % self.daterange_display]

        if self.selected_fixture():
            tag, fixture_id = self.selected_fixture()
            place = FixtureDataItem.get(
                fixture_id).fields_without_attributes['%s_name' % tag]
            parts.append('Location: %s' % place)

        if self.disease:
            disease_name = FixtureDataItem.get(
                self.disease[1]).fields_without_attributes['disease_name']
            parts.append('Disease: %s' % disease_name)

        if self.test_version:
            version_name = FixtureDataItem.get(
                self.test_version[1]).fields_without_attributes['visible_test_name']
            parts.append('Test Version: %s' % version_name)

        return parts
Beispiel #12
0
 def get_grouping_name(self, user):
     """
     Get the name of province/cbo/user (depending on what is selected)
     """
     # `user` is interpreted differently at each drill-down level: it is
     # passed as a fixture item id, a Group id, or a username below.
     if not self.selected_province():
         return FixtureDataItem.get(user).fields_without_attributes['name']
     elif not self.selected_cbo():
         return Group.get(user).name
     else:
         return CommCareUser.get_by_username(user).name
Beispiel #13
0
    def report_subtitles(self):
        """Build the list of subtitle strings for the active filters.

        NOTE: this variant reads raw ``.fields`` (not
        fields_without_attributes) — presumably older fixture documents;
        confirm against the model in use.
        """
        if self.needs_filters:
            return []

        subtitles = ["Date range: %s" % self.daterange_display]

        if self.selected_fixture():
            tag, id = self.selected_fixture()
            subtitles.append(
                'Location: %s' % FixtureDataItem.get(id).fields['%s_name' % tag])

        if self.disease:
            subtitles.append(
                'Disease: %s'
                % FixtureDataItem.get(self.disease[1]).fields['disease_name'])

        if self.test_version:
            subtitles.append(
                'Test Version: %s'
                % FixtureDataItem.get(self.test_version[1]).fields['visible_test_name'])

        return subtitles
Beispiel #14
0
 def get_grouping_name(self, user):
     """
     Get the name of province/cbo/user (depending on what is selected)
     """
     # Guard-clause form: resolve the id at whichever drill-down level
     # is still unselected.
     if not self.selected_province():
         return FixtureDataItem.get(user).fields_without_attributes['name']
     if not self.selected_cbo():
         return Group.get(user).name
     return CommCareUser.get_by_username(user).name
Beispiel #15
0
def data_items(request, domain, data_type_id, data_item_id):
    """Dispatch CRUD operations on lookup-table items by HTTP method.

    This variant lists items unsorted and performs a plain (non-recursive)
    delete.
    """

    def prepare_item(item):
        # Serialize one item, optionally embedding groups/users when
        # requested via query string flags.
        serialized = strip_json(item, disallow=['data_type_id'])
        if request.GET.get('groups') == 'true':
            serialized['groups'] = [strip_json(g) for g in item.get_groups()]
        if request.GET.get('users') == 'true':
            serialized['users'] = [prepare_user(u) for u in item.get_users()]
        return serialized

    if request.method == 'POST' and data_item_id is None:
        created = FixtureDataItem(domain=domain, data_type_id=data_type_id,
                                  **_to_kwargs(request))
        created.save()
        return json_response(strip_json(created, disallow=['data_type_id']))

    if request.method == 'GET' and data_item_id is None:
        return json_response([
            prepare_item(item)
            for item in FixtureDataItem.by_data_type(domain, data_type_id)
        ])

    if request.method == 'GET' and data_item_id:
        item = FixtureDataItem.get(data_item_id)
        assert(item.domain == domain and item.data_type.get_id == data_type_id)
        return json_response(prepare_item(item))

    if request.method == 'PUT' and data_item_id:
        existing = FixtureDataItem.get(data_item_id)
        incoming = FixtureDataItem(domain=domain, **_to_kwargs(request))
        # Only the 'fields' attribute is writable through PUT.
        existing.fields = incoming.fields
        existing.save()
        return json_response(strip_json(existing, disallow=['data_type_id']))

    if request.method == 'DELETE' and data_item_id:
        item = FixtureDataItem.get(data_item_id)
        assert(item.domain == domain and item.data_type.get_id == data_type_id)
        item.delete()
        return json_response({})

    return HttpResponseBadRequest()
Beispiel #16
0
def get_unique_combinations(domain, place_types=None, place=None):
    """Return one ``{place_type: id, ..., "gps": ...}`` dict per fixture
    item of the most specific ("base") place type.

    :param domain: domain whose fixtures are queried
    :param place_types: ordered place-type tags, most general first
    :param place: optional ``(place_type, fixture_item_id)`` pair used to
        restrict results to a single place
    :returns: list of combination dicts (empty when no place_types given)
    """
    if not place_types:
        return []
    if place:
        place_type = place[0]
        place = FixtureDataItem.get(place[1])
        place_name = place.fields_without_attributes[place_type + '_id']

    place_data_types = {}
    for pt in place_types:
        place_data_types[pt] = FixtureDataType.by_domain_tag(domain, pt).one()

    # The base type is the last (most specific) entry of place_types.
    # (Plain list(reversed(...)) instead of a redundant comprehension.)
    relevant_types = list(reversed(place_types))
    base_type = relevant_types[0] if relevant_types else ""
    fdis = FixtureDataItem.by_data_type(
        domain, place_data_types[base_type].get_id) if base_type else []

    combos = []
    for fdi in fdis:
        if place:
            if base_type == place_type:
                if fdi.fields_without_attributes[base_type +
                                                 '_id'] != place_name:
                    continue
            else:
                rel_type_name = fdi.fields_without_attributes.get(
                    place_type + "_id", "")
                if not rel_type_name:
                    logging.error(
                        "GSID Reports Error: fixture_id: %s -- place_type: %s"
                        % (fdi.get_id, place_type))
                    continue
                # NOTE(review): only this side is lower-cased before the
                # comparison; place_name keeps its original case — confirm
                # ids are stored lower-case.
                if rel_type_name.lower() != place_name:
                    continue
        comb = {}
        for pt in place_types:
            if base_type == pt:
                comb[pt] = str(fdi.fields_without_attributes[pt + '_id'])
                comb["gps"] = str(fdi.fields_without_attributes["gps"])
            else:
                p_id = fdi.fields_without_attributes.get(pt + "_id", None)
                if p_id:
                    # NOTE(review): this `continue` skips the remaining
                    # place types yet the partial combo is still appended
                    # below — confirm that is intended.
                    if place and pt == place_type and p_id != place_name:
                        continue
                    comb[pt] = str(p_id)
                else:
                    comb[pt] = None
        combos.append(comb)
    return combos
Beispiel #17
0
    def generate_lineage(self, leaf_type, leaf_item_id):
        """Build the chain of fixture items from hierarchy root to the
        given leaf item (leaf last).

        Raises a descriptive exception when ``leaf_type`` does not occur
        in the configured hierarchy. Previously ``index`` stayed unbound
        in that case and the loop below crashed with a NameError; the
        guard matches the other generate_lineage implementations in this
        file.
        """
        leaf_fdi = FixtureDataItem.get(leaf_item_id)

        index = None
        for i, h in enumerate(self.hierarchy[::-1]):
            if h["type"] == leaf_type:
                index = i

        if index is None:
            raise Exception(
                "Could not generate lineage for AsyncDrillableFilter due to a nonexistent leaf_type (%s)"
                % leaf_type)

        lineage = [leaf_fdi]
        for i, h in enumerate(self.full_hierarchy[::-1]):
            if i < index or i >= len(self.hierarchy) - 1:
                continue
            real_index = len(self.hierarchy) - (i + 1)
            lineage.insert(0, FixtureDataItem.by_field_value(
                self.domain, self.data_types(real_index - 1),
                h["references"], lineage[0].fields[h["parent_ref"]]).one())

        return lineage
Beispiel #18
0
    def __init__(self, request, base_context=None, domain=None, **kwargs):
        """Resolve the optional 'fixture_id' request param into a fixture
        type/item pair and wire up the report's data provider."""
        super(MCBase, self).__init__(request, base_context, domain, **kwargs)
        # Subclasses must define SECTIONS before this base init runs.
        assert self.SECTIONS is not None
        fixture = self.request_params.get('fixture_id', None)
        if fixture:
            # fixture_id is encoded as "<type_tag>:<item_id>".
            type_string, id = fixture.split(":")
            results = FixtureDataType.by_domain_tag(domain, type_string)
            fixture_type = results.one()
            fixture_item = FixtureDataItem.get(id)
        else:
            fixture_item = None
            fixture_type = None

        sqldata = McSqlData(self.SECTIONS, domain, self.datespan, fixture_type, fixture_item)
        self.data_provider = MCSectionedDataProvider(sqldata)
Beispiel #19
0
    def generate_lineage(self, leaf_type, leaf_item_id):
        """Return the fixture items from hierarchy root down to the leaf.

        Fix: ``index`` is now initialised and validated. Without the
        guard, an unknown ``leaf_type`` left ``index`` unbound and the
        second loop raised an opaque NameError; the explicit check mirrors
        the guarded variants of this method elsewhere in the file.
        """
        leaf_fdi = FixtureDataItem.get(leaf_item_id)

        index = None
        for i, h in enumerate(self.hierarchy[::-1]):
            if h["type"] == leaf_type:
                index = i

        if index is None:
            raise Exception(
                "Could not generate lineage for AsyncDrillableFilter due to a nonexistent leaf_type (%s)"
                % leaf_type)

        lineage = [leaf_fdi]
        for i, h in enumerate(self.full_hierarchy[::-1]):
            if i < index or i >= len(self.hierarchy) - 1:
                continue
            real_index = len(self.hierarchy) - (i + 1)
            lineage.insert(0, FixtureDataItem.by_field_value(
                self.domain, self.data_types(real_index - 1),
                h["references"], lineage[0].fields[h["parent_ref"]]).one())

        return lineage
Beispiel #20
0
 def config(self):
     """Build the parameter dict handed to the report's SQL queries."""
     loc = None
     type = 'loc'
     if 'fixture_id' in self.request_params and self.request_params.get('fixture_id'):
         # fixture_id is "<type>:<item_id>"; the location name is read from
         # the item's raw 'name' field (first entry of its field_list).
         type, id = self.request_params.get('fixture_id').split(":")
         loc = FixtureDataItem.get(id).fields['name']['field_list'][0]['field_value']
     return {
         'domain': self.domain,
         'startdate': self.datespan.startdate_param_utc,
         'enddate': self.datespan.enddate_param_utc,
         type: loc,
         'one': 1,
         'zero': 0,
         'not': -1,
         'empty': ''
     }
Beispiel #21
0
 def config(self):
     """Return query parameters for the report, keyed under the selected
     fixture type (default key 'loc' with value None)."""
     loc = None
     type = 'loc'
     if 'fixture_id' in self.request_params and self.request_params.get('fixture_id'):
         # The "<type>:<item_id>" param both renames the key and supplies
         # the location value from the fixture item's 'name' field.
         type, id = self.request_params.get('fixture_id').split(":")
         loc = FixtureDataItem.get(id).fields['name']['field_list'][0]['field_value']
     return {
         'domain': self.domain,
         'startdate': self.datespan.startdate_param_utc,
         'enddate': self.datespan.enddate_param_utc,
         type: loc,
         'one': 1,
         'zero': 0,
         'not': -1,
         'empty': ''
     }
Beispiel #22
0
    def __init__(self, request, base_context=None, domain=None, **kwargs):
        """Resolve the optional 'fixture_id' param and build the sectioned
        data provider (this variant also requires a format_class)."""
        super(MCBase, self).__init__(request, base_context, domain, **kwargs)
        # Subclasses must define both SECTIONS and format_class.
        assert self.SECTIONS is not None
        assert self.format_class is not None
        fixture = self.request_params.get('fixture_id', None)
        if fixture:
            # fixture_id is encoded as "<type_tag>:<item_id>".
            type_string, id = fixture.split(":")
            results = FixtureDataType.by_domain_tag(domain, type_string)
            fixture_type = results.one()
            fixture_item = FixtureDataItem.get(id)
        else:
            fixture_item = None
            fixture_type = None

        sqldata = McSqlData(self.SECTIONS, self.format_class, domain,
                            self.datespan, fixture_type, fixture_item)
        self.data_provider = MCSectionedDataProvider(sqldata)
Beispiel #23
0
def get_unique_combinations(domain, place_types=None, place=None):
    """Return one ``{place_type: id, ..., "gps": ...}`` dict per fixture
    item of the most specific ("base") place type, optionally restricted
    to the given ``place`` (a ``(place_type, fixture_item_id)`` pair).
    """
    if not place_types:
        return []
    if place:
        place_type = place[0]
        place = FixtureDataItem.get(place[1])
        place_name = place.fields_without_attributes[place_type + '_id']

    place_data_types = {}
    for pt in place_types:
        place_data_types[pt] = FixtureDataType.by_domain_tag(domain, pt).one()

    # The base type is the last (most specific) entry of place_types.
    relevant_types = [t for t in reversed(place_types)]
    base_type = relevant_types[0] if relevant_types else ""
    fdis = FixtureDataItem.by_data_type(domain, place_data_types[base_type].get_id) if base_type else []

    combos = []
    for fdi in fdis:
        if place:
            if base_type == place_type:
                if fdi.fields_without_attributes[base_type + '_id'] != place_name:
                    continue
            else:
                rel_type_name = fdi.fields_without_attributes.get(place_type+"_id", "")
                if not rel_type_name:
                    logging.error("GSID Reports Error: fixture_id: %s -- place_type: %s" % (fdi.get_id, place_type))
                    continue
                # NOTE(review): only rel_type_name is lower-cased before the
                # comparison — confirm fixture ids are stored lower-case.
                if rel_type_name.lower() != place_name:
                    continue
        comb = {}
        for pt in place_types:
            if base_type == pt:
                comb[pt] = str(fdi.fields_without_attributes[pt + '_id'])
                comb["gps"] = str(fdi.fields_without_attributes["gps"])
            else:
                p_id = fdi.fields_without_attributes.get(pt + "_id", None)
                if p_id:
                    # NOTE(review): this `continue` skips remaining place
                    # types but the partial combo is still appended below —
                    # confirm intended.
                    if place and pt == place_type and p_id != place_name:
                        continue
                    comb[pt] = str(p_id)
                else:
                    comb[pt] = None
        combos.append(comb)
    return combos
Beispiel #24
0
    def test_update(self):
        """PUT against the single-item endpoint replaces the item's fields
        and item_attributes and returns 204."""
        data_item = self._create_data_item()

        data_item_update = {
            "data_type_id": self.data_type._id,
            "fields": {
                "state_name": {
                    "field_list": [
                        {
                            "field_value": "Massachusetts",
                            "properties": {
                                "lang": "en"
                            }
                        },
                        {
                            "field_value": "马萨诸塞",
                            "properties": {
                                "lang": "zh"
                            }
                        },
                    ]
                }
            },
            "item_attributes": {
                "attribute1": "cool_attr_value",
            }
        }

        response = self._assert_auth_post_resource(
            self.single_endpoint(data_item._id),
            json.dumps(data_item_update),
            method="PUT")
        # Re-fetch so assertions run against the persisted document.
        data_item = FixtureDataItem.get(data_item._id)
        self.assertEqual(response.status_code, 204)
        self.assertEqual(data_item.data_type_id, self.data_type._id)
        self.assertEqual(len(data_item.fields), 1)
        self.assertEqual(
            data_item.fields['state_name'].field_list[0].field_value,
            'Massachusetts')
        self.assertEqual(
            data_item.fields['state_name'].field_list[0].properties,
            {"lang": "en"})
        self.assertEqual(data_item.item_attributes,
                         {"attribute1": "cool_attr_value"})
Beispiel #25
0
    def obj_get_list(self, bundle, **kwargs):
        """List fixture items: children of a parent, items of a type, or
        every item in the domain — depending on the query parameters."""
        domain = kwargs['domain']
        params = bundle.request.GET
        parent_id = params.get("parent_id", None)
        parent_ref_name = params.get("parent_ref_name", None)
        references = params.get("references", None)
        child_type = params.get("child_type", None)
        type_id = params.get("fixture_type_id", None)
        type_tag = params.get("fixture_type", None)

        if parent_id and parent_ref_name and child_type and references:
            # Children of a parent item, matched on a referenced field value.
            parent_fdi = FixtureDataItem.get(parent_id)
            ref_value = parent_fdi.fields_without_attributes[references]
            fdis = list(FixtureDataItem.by_field_value(
                domain, child_type, parent_ref_name, ref_value))
        elif type_id or type_tag:
            # Resolve a type tag to its id when only the tag was given.
            type_id = type_id or FixtureDataType.by_domain_tag(domain, type_tag).one()
            fdis = list(FixtureDataItem.by_data_type(domain, type_id))
        else:
            fdis = list(FixtureDataItem.by_domain(domain))

        return [convert_fdt(fdi) for fdi in fdis] or []
Beispiel #26
0
def data_item_users(request, domain, data_type_id, data_item_id, user_id):
    """Attach (POST) or detach (DELETE) a CommCare user to/from a data item."""
    data_type = FixtureDataType.get(data_type_id)
    data_item = FixtureDataItem.get(data_item_id)
    user = CommCareUser.get(user_id)
    # Integrity checks: doc types, domain ownership, and type/item linkage.
    # NOTE(review): asserts are stripped under `python -O`; confirm these
    # do not need to be explicit permission/404 responses.
    assert(data_type.doc_type == FixtureDataType._doc_type)
    assert(data_type.domain == domain)
    assert(data_item.doc_type == FixtureDataItem._doc_type)
    assert(data_item.domain == domain)
    assert(data_item.data_type_id == data_type_id)
    assert(user.doc_type == CommCareUser._doc_type)
    assert(user.domain == domain)

    if request.method == 'POST':
        data_item.add_user(user)
        return json_response({})
    elif request.method == 'DELETE':
        data_item.remove_user(user)
        return json_response({})
    else:
        return HttpResponseBadRequest()
Beispiel #27
0
def data_item_groups(request, domain, data_type_id, data_item_id, group_id):
    """Attach (POST) or detach (DELETE) a group to/from a data item."""
    data_type = FixtureDataType.get(data_type_id)
    data_item = FixtureDataItem.get(data_item_id)
    group = Group.get(group_id)
    # Integrity checks: doc types, domain ownership, and type/item linkage.
    # NOTE(review): asserts are stripped under `python -O`; confirm these
    # do not need to be explicit permission/404 responses.
    assert(data_type.doc_type == FixtureDataType._doc_type)
    assert(data_type.domain == domain)
    assert(data_item.doc_type == FixtureDataItem._doc_type)
    assert(data_item.domain == domain)
    assert(data_item.data_type_id == data_type_id)
    assert(group.doc_type == Group._doc_type)
    assert(group.domain == domain)

    if request.method == 'POST':
        data_item.add_group(group)
        return json_response({})
    elif request.method == 'DELETE':
        data_item.remove_group(group)
        return json_response({})
    else:
        return HttpResponseBadRequest()
Beispiel #28
0
def data_item_users(request, domain, data_type_id, data_item_id, user_id):
    """Add (POST) or remove (DELETE) a user's membership in a data item."""
    data_type = FixtureDataType.get(data_type_id)
    data_item = FixtureDataItem.get(data_item_id)
    user = CommCareUser.get(user_id)

    # Sanity-check doc types, domain ownership, and the type/item linkage
    # before mutating anything.
    assert data_type.doc_type == FixtureDataType._doc_type
    assert data_type.domain == domain
    assert data_item.doc_type == FixtureDataItem._doc_type
    assert data_item.domain == domain
    assert data_item.data_type_id == data_type_id
    assert user.doc_type == CommCareUser._doc_type
    assert user.domain == domain

    if request.method == 'POST':
        data_item.add_user(user)
    elif request.method == 'DELETE':
        data_item.remove_user(user)
    else:
        return HttpResponseBadRequest()
    return json_response({})
Beispiel #29
0
def data_item_groups(request, domain, data_type_id, data_item_id, group_id):
    """Add (POST) or remove (DELETE) a group's membership in a data item."""
    data_type = FixtureDataType.get(data_type_id)
    data_item = FixtureDataItem.get(data_item_id)
    group = Group.get(group_id)

    # Sanity-check doc types, domain ownership, and the type/item linkage
    # before mutating anything.
    assert data_type.doc_type == FixtureDataType._doc_type
    assert data_type.domain == domain
    assert data_item.doc_type == FixtureDataItem._doc_type
    assert data_item.domain == domain
    assert data_item.data_type_id == data_type_id
    assert group.doc_type == Group._doc_type
    assert group.domain == domain

    if request.method == 'POST':
        data_item.add_group(group)
    elif request.method == 'DELETE':
        data_item.remove_group(group)
    else:
        return HttpResponseBadRequest()
    return json_response({})
Beispiel #30
0
    def obj_get_list(self, bundle, **kwargs):
        """List fixture items, selected by query parameters:

        - parent_id + parent_ref_name + child_type + references:
          children of a parent item matched on a referenced field value
        - fixture_type_id or fixture_type: all items of that data type
        - otherwise: every item in the domain
        """
        domain = kwargs['domain']
        parent_id = bundle.request.GET.get("parent_id", None)
        parent_ref_name = bundle.request.GET.get("parent_ref_name", None)
        references = bundle.request.GET.get("references", None)
        child_type = bundle.request.GET.get("child_type", None)
        type_id = bundle.request.GET.get("fixture_type_id", None)
        type_tag = bundle.request.GET.get("fixture_type", None)

        if parent_id and parent_ref_name and child_type and references:
            parent_fdi = FixtureDataItem.get(parent_id)
            fdis = list(
                FixtureDataItem.by_field_value(
                    domain, child_type, parent_ref_name,
                    parent_fdi.fields_without_attributes[references]))
        elif type_id or type_tag:
            # Resolve a type tag to its id when only the tag was supplied.
            type_id = type_id or FixtureDataType.by_domain_tag(
                domain, type_tag).one()
            fdis = list(FixtureDataItem.by_data_type(domain, type_id))
        else:
            fdis = list(FixtureDataItem.by_domain(domain))

        return [convert_fdt(fdi) for fdi in fdis] or []
Beispiel #31
0
def _process_data_item(domain, replace, data_type, di, item_fields, item_attributes, sort_key):
    """Processes a FixtureDataItem from its excel upload.

    Returns a tuple with
      - (unsaved) FixtureDataItem instance
      - boolean flag to indicate if the data item was deleted
      - a list of errors
    """
    delete = False
    errors = []
    # Fresh item built from the uploaded row; used whenever no valid
    # existing item can be located via the row's UID.
    new_data_item = FixtureDataItem(
        domain=domain,
        data_type_id=data_type.get_id,
        fields=item_fields,
        item_attributes=item_attributes,
        sort_key=sort_key
    )
    try:
        if di['UID'] and not replace:
            # Row references an existing item and we are not replacing the
            # whole table: fetch it so the upload updates it in place.
            old_data_item = FixtureDataItem.get(di['UID'])
        else:
            old_data_item = new_data_item
        old_data_item.fields = item_fields
        old_data_item.item_attributes = item_attributes
        # A UID that points at another domain or table is invalid: fall
        # back to the fresh item and record an error for the uploader.
        if (old_data_item.domain != domain
                or old_data_item.data_type_id != data_type.get_id):
            old_data_item = new_data_item
            errors.append(
                _("'%(UID)s' is not a valid UID. But the new item is created.")
                % {'UID': di['UID']}
            )
        # The delete-marker column accepts either case of "y".
        if di[DELETE_HEADER] in ("Y", "y"):
            delete = True
    except (ResourceNotFound, KeyError):
        # Unknown UID or missing column: treat the row as a new item.
        old_data_item = new_data_item

    return old_data_item, delete, errors
Beispiel #32
0
def run_upload(request, domain, workbook, replace=False):
    """Import lookup tables (fixtures) from an uploaded excel workbook.

    The workbook must contain a 'types' sheet defining each table plus one
    data sheet per table tag.  Types and rows are validated against each
    other, then saved inside a single ``CouchTransaction``; user-visible
    warnings are reported through Django ``messages`` on ``request``.

    :param request: current request, used for ``messages`` reporting.
    :param domain: domain that owns the fixtures.
    :param workbook: parsed excel workbook.
    :param replace: when True, matching existing tables are recursively
        deleted and rebuilt from the sheet contents.
    :returns: dict with "unknown_groups", "unknown_users" and
        "number_of_fixtures" keys.
    :raises ExcelMalformatException: when a sheet does not match its 'types'
        definition.
    :raises DuplicateFixtureTagException: when two 'types' rows share a tag.
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    # NOTE: several messages below were fixed: missing space before
    # '{field}', unbalanced quotes, doubled quote after '{tag}', and the
    # "interger" typo.
    failure_messages = {
        "has_no_column":
        "Workbook 'types' has no column '{column_name}'.",
        "has_no_field_column":
        "Excel-sheet '{tag}' does not contain the column '{field}' "
        "as specified in its 'types' definition",
        "has_extra_column":
        "Excel-sheet '{tag}' has an extra column " +
        "'{field}' that's not defined in its 'types' definition",
        "wrong_property_syntax":
        "Properties should be specified as 'field 1: property 1'. In 'types' sheet, "
        + "'{prop_key}' for field '{field}' is not correctly formatted",
        "sheet_has_no_property":
        "Excel-sheet '{tag}' does not contain property " +
        "'{property}' of the field '{field}' as specified in its 'types' definition",
        "sheet_has_extra_property":
        "Excel-sheet '{tag}' has an extra property " +
        "'{property}' for the field '{field}' that's not defined in its 'types' definition. Re-check the formatting",
        "invalid_field_with_property":
        "Fields with attributes should be numbered as 'field: {field} integer'",
        "invalid_property":
        "Attribute should be written as '{field}: {prop} integer'",
        "wrong_field_property_combos":
        "Number of values for field '{field}' and attribute '{prop}' should be same",
        "replace_with_UID":
        "Rows shouldn't contain UIDs while using replace option. Excel sheet '{tag}' contains UID in a row."
    }

    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        # Convert a missing column into a user-facing excel-format error.
        try:
            return container[attr]
        except KeyError:
            raise ExcelMalformatException(
                _(failure_messages["has_no_column"].format(column_name=attr)))

    def diff_lists(list_a, list_b):
        # Returns (items only in list_b, items only in list_a).
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # BUGFIX: was `set_a.difference(set_a)`, which is always empty and
        # silently disabled the "missing column/property" checks below.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        fixtures_tags = []
        type_sheets = []
        # First pass over 'types': collect tags and fail fast on duplicates
        # before anything is written.
        for number_of_fixtures, dt in enumerate(data_types):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                tag = _get_or_raise(dt, 'tag')
            if tag in fixtures_tags:
                error_message = "Upload Failed: Lookup-tables should have unique 'table_id'. There are two rows with table_id '{tag}' in 'types' sheet."
                raise DuplicateFixtureTagException(
                    _(error_message.format(tag=tag)))
            fixtures_tags.append(tag)
            type_sheets.append(dt)
        for number_of_fixtures, dt in enumerate(type_sheets):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                # Legacy workbooks used a 'tag' header instead of 'table_id'.
                messages.info(
                    request,
                    _("Excel-header 'tag' is renamed as 'table_id' and 'name' header is no longer needed."
                      ))
                tag = _get_or_raise(dt, 'tag')

            type_definition_fields = _get_or_raise(dt, 'field')
            type_fields_with_properties = []
            for count, field in enumerate(type_definition_fields):
                # Field properties live in columns named "field 1", "field 2", ...
                prop_key = "field " + str(count + 1)
                if prop_key in dt:
                    try:
                        property_list = dt[prop_key]["property"]
                    except KeyError:
                        error_message = failure_messages[
                            "wrong_property_syntax"].format(prop_key=prop_key,
                                                            field=field)
                        raise ExcelMalformatException(_(error_message))
                else:
                    property_list = []
                field_with_prop = FixtureTypeField(field_name=field,
                                                   properties=property_list)
                type_fields_with_properties.append(field_with_prop)

            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                tag=tag,
                fields=type_fields_with_properties,
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif 'UID' in dt and dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                if replace:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type
                data_type.fields = type_fields_with_properties
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's table: keep the freshly
                    # built type but warn the uploader.
                    data_type = new_data_type
                    messages.error(
                        request,
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys()
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = failure_messages[
                        "has_no_field_column"].format(tag=tag,
                                                      field=not_in_sheet[0])
                    raise ExcelMalformatException(_(error_message))
                if len(not_in_types) > 0:
                    error_message = failure_messages[
                        "has_extra_column"].format(tag=tag,
                                                   field=not_in_types[0])
                    raise ExcelMalformatException(_(error_message))

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = failure_messages[
                                "sheet_has_no_property"].format(
                                    tag=tag,
                                    property=not_in_sheet[0],
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        if len(not_in_types) > 0:
                            error_message = failure_messages[
                                "sheet_has_extra_property"].format(
                                    tag=tag,
                                    property=not_in_types[0],
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = failure_messages[
                                "invalid_field_with_property"].format(
                                    field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = failure_messages[
                                    "invalid_property"].format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = failure_messages[
                                    "wrong_field_property_combos"].format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(_(error_message))

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        # Fields with properties carry parallel lists of
                        # values and per-value property values.
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                new_data_item = FixtureDataItem(domain=domain,
                                                data_type_id=data_type.get_id,
                                                fields=item_fields,
                                                sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        messages.error(
                            request,
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Drop all existing ownership links, then re-add the ones
                # listed on this row.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(
                            request,
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username),
                                                      domain)
                    except ValidationError:
                        # BUGFIX: message lacked the %(name)s placeholder, so
                        # the offending username never appeared in the error.
                        messages.error(
                            request,
                            _("Invalid username: '%(name)s'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        # BUGFIX: same missing %(name)s placeholder as above.
                        messages.error(
                            request,
                            _("Unknown user: '%(name)s'. But the row is successfully added"
                              ) % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
Beispiel #33
0
    def columns(self):
        """Build the report's columns: test counts, positive-test
        percentages and age ranges, each split by gender, plus optional
        GPS/disease columns when rendering the map view."""
        # None-safe integer addition used by the aggregate columns.
        sum_fn = lambda x, y: int(x or 0) + int(y or 0)

        # Combined positives rendered as "count (percent%)".
        # NOTE(review): the `/` here looks like it relies on Python 2
        # integer division for a whole-number percentage — confirm before
        # porting to Python 3.
        total_percent_agg_fn = lambda f_pos, m_pos, f_tot, m_tot: dict(sort_key=sum_fn(f_pos, m_pos), html="%(x)s (%(p)s%%)" % \
            {
                "x": sum_fn(f_pos, m_pos),
                "p": (100 * sum_fn(f_pos, m_pos) / (sum_fn(m_tot, f_tot) or 1))
            })

        patient_number_group = DataTablesColumnGroup("Tests")
        positive_group = DataTablesColumnGroup("Positive Tests")
        age_range_group = DataTablesColumnGroup("Age Range")

        male_filter = EQ("gender", "male")
        female_filter = EQ("gender", "female")

        # Count columns define aliases ("male-total", etc.) that the
        # AggregateColumns below reference via AliasColumn.
        columns = self.common_columns + [
            DatabaseColumn("Number of Males ",
                           CountColumn('doc_id',
                                       alias="male-total",
                                       filters=self.filters + [male_filter]),
                           header_group=patient_number_group),
            DatabaseColumn("Number of Females ",
                           CountColumn('doc_id',
                                       alias="female-total",
                                       filters=self.filters + [female_filter]),
                           header_group=patient_number_group),
            AggregateColumn(
                "Total",
                sum_fn,
                [AliasColumn("male-total"),
                 AliasColumn("female-total")],
                header_group=patient_number_group),
            AggregateColumn(
                "Male +ve Percent",
                self.percent_agg_fn, [
                    CountColumn(
                        'doc_id',
                        alias="male-positive",
                        filters=self.filters +
                        [AND([male_filter,
                              EQ("diagnosis", "positive")])]),
                    AliasColumn("male-total")
                ],
                header_group=positive_group,
                sort_type=DTSortType.NUMERIC),
            AggregateColumn(
                "Female +ve Percent",
                self.percent_agg_fn, [
                    CountColumn(
                        'doc_id',
                        alias="female-positive",
                        filters=self.filters +
                        [AND([female_filter,
                              EQ("diagnosis", "positive")])]),
                    AliasColumn("female-total")
                ],
                header_group=positive_group,
                sort_type=DTSortType.NUMERIC),
            AggregateColumn("Total +ve Percent",
                            total_percent_agg_fn, [
                                AliasColumn("female-positive"),
                                AliasColumn("male-positive"),
                                AliasColumn("female-total"),
                                AliasColumn("male-total")
                            ],
                            header_group=positive_group,
                            sort_type=DTSortType.NUMERIC),
            AggregateColumn(
                "Male age range",
                functools.partial(self.age_fn, 'male'), [
                    MinColumn("age",
                              alias="male-min",
                              filters=self.filters + [male_filter]),
                    MaxColumn("age",
                              alias="male-max",
                              filters=self.filters + [male_filter])
                ],
                header_group=age_range_group),
            AggregateColumn(
                "Female age range",
                functools.partial(self.age_fn, 'female'), [
                    MinColumn("age",
                              alias="female-min",
                              filters=self.filters + [female_filter]),
                    MaxColumn("age",
                              alias="female-max",
                              filters=self.filters + [female_filter])
                ],
                header_group=age_range_group),
            AggregateColumn("All age range",
                            functools.partial(self.age_fn, 'total'), [
                                MinColumn("age",
                                          alias="age-min",
                                          filters=self.filters +
                                          [OR([female_filter, male_filter])]),
                                MaxColumn("age",
                                          alias="age-max",
                                          filters=self.filters +
                                          [OR([female_filter, male_filter])])
                            ],
                            header_group=age_range_group),
        ]

        if self.is_map:
            # Map view additionally needs a GPS column and a static disease
            # label looked up from the selected disease fixture item.
            columns.append(
                DatabaseColumn("gps",
                               MaxColumn(self.gps_key),
                               format_fn=lambda x: x))
            disease = FixtureDataItem.get(
                self.disease[1]).fields_without_attributes[
                    'disease_name'] if self.disease else 'All diseases'
            columns.append(
                DatabaseColumn('disease', StaticColumn('disease', disease)))

        return columns
Beispiel #34
0
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    """Import lookup tables (fixtures) from *workbook* into *domain*.

    Creates/updates ``FixtureDataType`` and ``FixtureDataItem`` docs inside a
    single ``CouchTransaction``, re-links each item's groups, users and
    locations, and finally clears the domain's fixture caches.

    :param domain: domain that owns the fixtures.
    :param workbook: parsed workbook exposing ``get_all_type_sheets()`` and
        ``get_data_sheet(tag)``.
    :param replace: when True, matching existing tables are recursively
        deleted and rebuilt from the sheet contents.
    :param task: optional task handle used for progress reporting via
        ``DownloadBase.set_progress``.
    :returns: a ``FixtureUploadResult`` with counts and per-row errors.
    """
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table contributes 10 progress units overall.
            if task:
                processed = table_count * 10 + (10 * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's table: keep the freshly
                    # built type but surface the problem to the uploader.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': table_def.uid}
                    )
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)
            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        # Fields with properties carry parallel lists of
                        # values and per-value property values.
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(field_prop_combos[x]),
                                properties={prop: six.text_type(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key
                )
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']}
                        )
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Drop all existing ownership links, then re-add the ones
                # listed on this row.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name}
                        )

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        # BUGFIX: message lacked the %(name)s placeholder, so
                        # the offending username never appeared in the error.
                        return_val.errors.append(
                            _("Invalid username: '%(name)s'. Row is not added")
                            % {'name': raw_username}
                        )
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        # BUGFIX: same missing %(name)s placeholder as above.
                        return_val.errors.append(
                            _("Unknown user: '%(name)s'. But the row is successfully added")
                            % {'name': raw_username}
                        )

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
Beispiel #35
0
    def columns(self):
        """Assemble the report's columns: case sums, positive-test
        percentages and age ranges, each broken down by gender, plus the
        GPS/disease columns when rendering the map view."""

        def _add(x, y):
            # None-safe integer addition for aggregate totals.
            return int(x or 0) + int(y or 0)

        def _total_percent(f_pos, m_pos, f_tot, m_tot):
            # Combined positives rendered as "count (percent%)".
            positives = _add(f_pos, m_pos)
            return dict(
                sort_key=positives,
                html="%(x)s (%(p)s%%)" % {
                    "x": positives,
                    "p": (100 * positives / (_add(m_tot, f_tot) or 1))
                })

        tests = DataTablesColumnGroup("Tests")
        positives = DataTablesColumnGroup("Positive Tests")
        ages = DataTablesColumnGroup("Age Range")

        is_male = EQ("gender", "male")
        is_female = EQ("gender", "female")

        def _case_sum(alias, extra_filters):
            # SumColumn over 'cases' with the report filters plus extras;
            # the alias is referenced below via AliasColumn.
            return SumColumn('cases', alias=alias,
                             filters=self.filters + extra_filters)

        cols = self.common_columns + [
            DatabaseColumn("Number of Males ",
                           _case_sum("male-total", [is_male]),
                           header_group=tests),
            DatabaseColumn("Number of Females ",
                           _case_sum("female-total", [is_female]),
                           header_group=tests),
            AggregateColumn("Total", _add,
                            [AliasColumn("male-total"),
                             AliasColumn("female-total")],
                            header_group=tests),

            AggregateColumn("Male +ve Percent", self.percent_agg_fn,
                            [_case_sum("male-positive",
                                       [AND([is_male, EQ("diagnosis", "positive")])]),
                             AliasColumn("male-total")],
                            header_group=positives,
                            sort_type=DTSortType.NUMERIC),
            AggregateColumn("Female +ve Percent", self.percent_agg_fn,
                            [_case_sum("female-positive",
                                       [AND([is_female, EQ("diagnosis", "positive")])]),
                             AliasColumn("female-total")],
                            header_group=positives,
                            sort_type=DTSortType.NUMERIC),
            AggregateColumn("Total +ve Percent", _total_percent,
                            [AliasColumn("female-positive"),
                             AliasColumn("male-positive"),
                             AliasColumn("female-total"),
                             AliasColumn("male-total")],
                            header_group=positives,
                            sort_type=DTSortType.NUMERIC),

            AggregateColumn("Male age range",
                            functools.partial(self.age_fn, 'male'),
                            [MinColumn("age", alias="male-min",
                                       filters=self.filters + [is_male]),
                             MaxColumn("age", alias="male-max",
                                       filters=self.filters + [is_male])],
                            header_group=ages),
            AggregateColumn("Female age range",
                            functools.partial(self.age_fn, 'female'),
                            [MinColumn("age", alias="female-min",
                                       filters=self.filters + [is_female]),
                             MaxColumn("age", alias="female-max",
                                       filters=self.filters + [is_female])],
                            header_group=ages),
            AggregateColumn("All age range",
                            functools.partial(self.age_fn, 'total'),
                            [MinColumn("age", alias="age-min",
                                       filters=self.filters + [OR([is_female, is_male])]),
                             MaxColumn("age", alias="age-max",
                                       filters=self.filters + [OR([is_female, is_male])])],
                            header_group=ages),
        ]

        if self.is_map:
            # Map view additionally needs a GPS column and a static disease
            # label looked up from the selected disease fixture item.
            cols.append(DatabaseColumn("gps", MaxColumn(self.gps_key),
                                       format_fn=lambda x: x))
            disease = (FixtureDataItem.get(self.disease[1]).fields['disease_name']
                       if self.disease else 'All diseases')
            cols.append(DatabaseColumn('disease', StaticColumn('disease', disease)))

        return cols
Beispiel #36
0
def run_upload(request, domain, workbook, replace=False):
    """Upload fixture (lookup table) types and rows from an Excel workbook.

    The 'types' sheet defines each table (tag, fields, per-field properties);
    every other sheet holds the rows for one table.  Sheet columns are
    validated against the type definitions, all documents are saved inside a
    single ``CouchTransaction``, and group/user associations are rebuilt for
    each row.  User feedback is emitted through Django ``messages``.

    :param request: Django request, used only for ``messages`` feedback.
    :param domain: domain name the fixtures belong to.
    :param workbook: workbook wrapper exposing ``get_worksheet``.
    :param replace: when True, matching existing tables are deleted and
        recreated from the workbook contents.
    :return: dict with ``unknown_groups``, ``unknown_users`` and
        ``number_of_fixtures`` keys.
    :raises ExcelMalformatException: on any sheet/type-definition mismatch.
    :raises DuplicateFixtureTagException: if two type rows share a table_id.
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    failure_messages = {
        "has_no_column": "Workbook 'types' has no column '{column_name}'.",
        "has_no_field_column": "Excel-sheet '{tag}' does not contain the column '{field}' "
                               "as specified in its 'types' definition",
        "has_extra_column": "Excel-sheet '{tag}' has an extra column" +
                            "'{field}' that's not defined in its 'types' definition",
        "wrong_property_syntax": "Properties should be specified as 'field 1: property 1'. In 'types' sheet, " +
                            "'{prop_key}' for field '{field}' is not correctly formatted",
        "sheet_has_no_property": "Excel-sheet '{tag}' does not contain property " +
                            "'{property}' of the field '{field}' as specified in its 'types' definition",
        # fixed: stray extra quote after '{tag}'
        "sheet_has_extra_property": "Excel-sheet '{tag}' has an extra property " +
                            "'{property}' for the field '{field}' that's not defined in its 'types' definition. Re-check the formatting",
        # fixed: unbalanced quote / 'interger' typo in the two messages below
        "invalid_field_with_property": "Fields with attributes should be numbered as 'field: {field} integer'",
        "invalid_property": "Attribute should be written as '{field}: {prop} integer'",
        "wrong_field_property_combos": "Number of values for field '{field}' and attribute '{prop}' should be same",
        "replace_with_UID": "Rows shouldn't contain UIDs while using replace option. Excel sheet '{tag}' contains UID in a row."
    }

    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        # A missing required column in the 'types' sheet is a hard failure.
        try:
            return container[attr]
        except KeyError:
            raise ExcelMalformatException(_(failure_messages["has_no_column"].format(column_name=attr)))

    def diff_lists(list_a, list_b):
        """Return ``(only_in_b, only_in_a)`` as lists.

        BUG FIX: ``not_in_a`` previously computed ``set_a - set_a`` (always
        empty), so elements present in ``list_b`` but missing from ``list_a``
        were never reported and the "missing column/property" validations
        below could never fire.
        """
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        fixtures_tags = []
        type_sheets = []
        # First pass: collect table tags and reject duplicates up front.
        for number_of_fixtures, dt in enumerate(data_types):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                # legacy header name
                tag = _get_or_raise(dt, 'tag')
            if tag in fixtures_tags:
                error_message = "Upload Failed: Lookup-tables should have unique 'table_id'. There are two rows with table_id '{tag}' in 'types' sheet."
                raise DuplicateFixtureTagException(_(error_message.format(tag=tag)))
            fixtures_tags.append(tag)
            type_sheets.append(dt)
        # Second pass: build/update each data type and then its items.
        for number_of_fixtures, dt in enumerate(type_sheets):
            try:
                tag = _get_or_raise(dt, 'table_id')
            except ExcelMalformatException:
                messages.info(request, _("Excel-header 'tag' is renamed as 'table_id' and 'name' header is no longer needed."))
                tag = _get_or_raise(dt, 'tag')

            type_definition_fields = _get_or_raise(dt, 'field')
            type_fields_with_properties = []
            for count, field in enumerate(type_definition_fields):
                # "field N" columns (1-based) carry per-field property lists.
                prop_key = "field " + str(count + 1)
                if dt.has_key(prop_key):
                    try:
                        property_list = dt[prop_key]["property"]
                    except KeyError:
                        error_message = failure_messages["wrong_property_syntax"].format(
                            prop_key=prop_key,
                            field=field
                        )
                        raise ExcelMalformatException(_(error_message))
                else:
                    property_list = []
                field_with_prop = FixtureTypeField(
                    field_name=field,
                    properties=property_list
                )
                type_fields_with_properties.append(field_with_prop)

            new_data_type = FixtureDataType(
                domain=domain,
                is_global=dt.get('is_global', False),
                tag=tag,
                fields=type_fields_with_properties,
            )
            try:
                # Prefer an existing table with the same tag in this domain.
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif 'UID' in dt and dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                if replace:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type
                data_type.fields = type_fields_with_properties
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's doc; fall back to a new type.
                    data_type = new_data_type
                    messages.error(request, _("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                # Stale/invalid UID or missing Delete(Y/N) column: treat as new.
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys()
                not_in_sheet, not_in_types = diff_lists(item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = failure_messages["has_no_field_column"].format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(_(error_message))
                if len(not_in_types) > 0:
                    error_message = failure_messages["has_extra_column"].format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(_(error_message))

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = failure_messages["sheet_has_no_property"].format(
                                tag=tag,
                                property=not_in_sheet[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(_(error_message))
                        if len(not_in_types) > 0:
                            error_message = failure_messages["sheet_has_extra_property"].format(
                                tag=tag,
                                property=not_in_types[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(_(error_message))
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = failure_messages["invalid_field_with_property"].format(field=field.field_name)
                            raise ExcelMalformatException(_(error_message))
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = failure_messages["invalid_property"].format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(_(error_message))
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = failure_messages["wrong_field_property_combos"].format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(_(error_message))

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        # Field with properties: parallel lists of values and
                        # per-property values, combined index-by-index.
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={prop: unicode(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    sort_key=sort_key
                )
                try:
                    # Reuse the row identified by UID unless replacing everything.
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        messages.error(request, _("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Rebuild group/user associations from scratch for this row.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, _("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(raw_username, domain)
                    except ValidationError:
                        messages.error(request, _("Invalid username: '******'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(request, _("Unknown user: '******'. But the row is successfully added") % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
Beispiel #37
0
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    """Upload fixture (lookup table) types and rows from a parsed workbook.

    Each type sheet in ``workbook`` defines a table; the matching data sheet
    supplies its rows.  All documents are saved inside one CouchTransaction,
    group/user/location associations are rebuilt per row, and fixture caches
    are cleared at the end.

    :param domain: domain name the fixtures belong to.
    :param workbook: workbook wrapper exposing ``get_all_type_sheets`` and
        ``get_data_sheet``.
    :param replace: when True, pre-existing matching tables are deleted and
        recreated from the workbook contents.
    :param task: optional async task handle used only for progress reporting.
    :return: a ``FixtureUploadResult`` with table count and accumulated errors.
    """
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table contributes 10 progress units; rows advance
            # fractionally within the current table's 10 units.
            if task:
                processed = table_count * 10 + (10 * item_count /
                                                items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                # Prefer an existing table with the same tag in this domain.
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID pointed at another domain's doc; fall back to a new type.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                # Stale/invalid UID: create the table from scratch.
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)
            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        # Field with properties: parallel lists of values and
                        # per-property values, combined index-by-index.
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(
                                    field_prop_combos[x]),
                                properties={
                                    prop: six.text_type(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    # Reuse the row identified by UID unless replacing everything.
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                        pass
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        # UID belongs to another domain/table; make a new item.
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Rebuild group/user/location associations from scratch.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username),
                                                      domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added"
                              ) % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
Beispiel #38
0
 def get(self, fixture_id):
     """Load the fixture item with ``fixture_id`` as a model instance.

     Each stored field is converted via ``to_field_value`` and passed to
     ``self.model_class`` as a keyword argument, together with the item's
     document id as ``_fixture_id``.
     """
     record = FixtureDataItem.get(fixture_id)
     converted = {}
     for name, raw_value in record.fields.iteritems():
         converted[name] = to_field_value(raw_value)
     return self.model_class(_fixture_id=record.get_id, **converted)
Beispiel #39
0
def run_upload(request, domain, workbook):
    """Upload fixture types from the 'types' sheet and their item rows.

    Legacy uploader: validates each item sheet's columns against the type
    definition, saves documents inside one CouchTransaction, and rebuilds
    group/user links for every row.  User feedback goes through Django
    ``messages``.

    :return: dict with ``unknown_groups``, ``unknown_users`` and
        ``number_of_fixtures`` keys.
    """
    return_val = {
        "unknown_groups": [], 
        "unknown_users": [], 
        "number_of_fixtures": 0,
    }
    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        # A missing required column in the 'types' sheet is a hard failure.
        try:
            return container[attr]
        except KeyError:
            raise Exception("Workbook 'types' has no column '{attr}'".format(attr=attr))
   
    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        for number_of_fixtures, dt in enumerate(data_types):
            tag = _get_or_raise(dt, 'tag')
            type_definition_fields = _get_or_raise(dt, 'field')

            new_data_type = FixtureDataType(
                    domain=domain,
                    is_global=dt.get('is_global', False),
                    name=_get_or_raise(dt, 'name'),
                    tag=_get_or_raise(dt, 'tag'),
                    fields=type_definition_fields,
            )
            try:
                # Reuse the doc referenced by UID when present.
                if dt['UID']:
                    data_type = FixtureDataType.get(dt['UID'])
                else:
                    data_type = new_data_type
                    pass
                data_type.fields = type_definition_fields
                data_type.is_global = dt.get('is_global', False)
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID belongs to another domain; create a new type instead.
                    data_type = new_data_type
                    messages.error(request, _("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': dt['UID']})
                if dt[DELETE_HEADER] == "Y" or dt[DELETE_HEADER] == "y":
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError) as e:
                # Stale/invalid UID or missing Delete(Y/N) column: treat as new.
                data_type = new_data_type
            transaction.save(data_type)

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields = di['field']
                for field in type_definition_fields:
                    if not item_fields.has_key(field):
                        raise Exception(_("Workbook '%(tag)s' does not contain the column " +
                                          "'%(field)s' specified in its 'types' definition") % {'tag': tag, 'field': field})
                item_fields_list = di['field'].keys()
                for field in item_fields_list:
                    if not field in type_definition_fields:
                        raise Exception(_("""Workbook '%(tag)s' has an extra column 
                                          '%(field)s' that's not defined in its 'types' definition""") % {'tag': tag, 'field': field})                
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    sort_key=sort_key
                )
                try:
                    # Reuse the row identified by UID when present.
                    if di['UID']:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                        pass
                    old_data_item.fields = di['field']   
                    if old_data_item.domain != domain:
                        old_data_item = new_data_item
                        messages.error(request, _("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID'] })
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    assert old_data_item.data_type_id == data_type.get_id
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue               
                except (ResourceNotFound, KeyError) as e:
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Drop all existing associations; they are rebuilt below.
                old_groups = old_data_item.get_groups()
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.get_users()
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                        group = group_memoizer.by_name(group_name)
                        if group:
                            old_data_item.add_group(group, transaction=transaction)
                        else:
                            messages.error(request, _("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})

                for raw_username in di.get('user', []):
                        try:
                            username = normalize_username(raw_username, domain)
                        except ValidationError:
                            messages.error(request, _("Invalid username: '******'. Row is not added") % {'name': raw_username})
                            continue
                        user = CommCareUser.get_by_username(username)
                        if user:
                            old_data_item.add_user(user)
                        else:
                            messages.error(request, _("Unknown user: '******'. But the row is successfully added") % {'name': raw_username})

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
Beispiel #40
0
def run_upload(domain, workbook, replace=False, task=None):
    """Upload lookup-table (fixture) types and rows from an Excel workbook.

    Each row of the "types" sheet creates or updates a ``FixtureDataType``;
    each row of the matching data sheet creates or updates a
    ``FixtureDataItem``.  Group, user and location ownership columns are
    cleared and re-applied from the sheet.  Structurally malformed sheets
    raise ``ExcelMalformatException``; recoverable per-row problems are
    appended to the returned result's ``errors``.

    :param domain: domain the fixtures belong to
    :param workbook: parsed upload workbook (must support
        ``get_all_type_sheets`` and ``get_data_sheet``)
    :param replace: when True, delete existing tables/rows and recreate
        them from the sheet instead of updating in place
    :param task: optional task handle used for ``DownloadBase`` progress
    :return: ``FixtureUploadResult``
    """
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location(domain)

    def diff_lists(list_a, list_b):
        """Return ``([in b but not a], [in a but not b])``."""
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # BUG FIX: was ``set_a.difference(set_a)`` (always empty), which
        # silently disabled every "missing column/property" check below.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table contributes 10 progress units, prorated per row.
            if task:
                processed = table_count * 10 + (10. * item_count /
                                                items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=table_def.table_id,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID resolved to another domain's table: never touch it.
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = list(workbook.get_data_sheet(data_type))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys() if 'field' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that this item has all the properties listed in its 'types' definition
                item_attributes_list = di['property'].keys(
                ) if 'property' in di else []
                not_in_sheet, not_in_types = diff_lists(
                    item_attributes_list, data_type.item_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_no_field_column"]).format(
                            tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(
                        FAILURE_MESSAGES["has_extra_column"]).format(
                            tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(
                            sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_no_property"]
                            ).format(tag=tag,
                                     property=not_in_sheet[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        if len(not_in_types) > 0:
                            error_message = _(
                                FAILURE_MESSAGES["sheet_has_extra_property"]
                            ).format(tag=tag,
                                     property=not_in_types[0],
                                     field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = _(
                                FAILURE_MESSAGES["invalid_field_with_property"]
                            ).format(field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = _(
                                    FAILURE_MESSAGES["invalid_property"]
                                ).format(field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = _(FAILURE_MESSAGES[
                                    "wrong_field_property_combos"]).format(
                                        field=field.field_name, prop=prop)
                                raise ExcelMalformatException(error_message)

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={
                                    prop: unicode(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        # UID belongs to a different domain or table: don't
                        # overwrite it, fall back to creating a fresh item.
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Re-derive ownership entirely from the sheet: clear existing
                # group/user/location links, then re-add the listed ones.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username),
                                                      domain)
                    except ValidationError:
                        # BUG FIX: restored the '%(name)s' placeholder that was
                        # stripped from this message (the mapping below expects it).
                        return_val.errors.append(
                            _("Invalid username: '%(name)s'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '%(name)s'. But the row is successfully added"
                              ) % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    return return_val
Beispiel #41
0
def run_upload(domain, workbook, replace=False, task=None):
    """Upload lookup-table (fixture) types and rows from an Excel workbook.

    Each row of the "types" sheet creates or updates a ``FixtureDataType``;
    each row of the matching data sheet creates or updates a
    ``FixtureDataItem``.  Group and user ownership columns are cleared and
    re-applied from the sheet.  Structurally malformed sheets raise
    ``ExcelMalformatException``; recoverable per-row problems are appended
    to the returned result's ``errors``.

    :param domain: domain the fixtures belong to
    :param workbook: parsed upload workbook (must support
        ``get_all_type_sheets`` and ``get_data_sheet``)
    :param replace: when True, delete existing tables/rows and recreate
        them from the sheet instead of updating in place
    :param task: optional task handle used for ``DownloadBase`` progress
    :return: ``FixtureUploadResult``
    """
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)

    pre_populate_location_groups(group_memoizer, domain)

    def diff_lists(list_a, list_b):
        """Return ``([in b but not a], [in a but not b])``."""
        set_a = set(list_a)
        set_b = set(list_b)
        not_in_b = set_a.difference(set_b)
        # BUG FIX: was ``set_a.difference(set_a)`` (always empty), which
        # silently disabled every "missing column/property" check below.
        not_in_a = set_b.difference(set_a)
        return list(not_in_a), list(not_in_b)

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            # Each table contributes 10 progress units, prorated per row.
            if task:
                processed = table_count * 10 + (10. * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=table_def.table_id,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    # UID resolved to another domain's table: never touch it.
                    data_type = new_data_type
                    return_val.errors.append(_("'%(UID)s' is not a valid UID. But the new type is created.") % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)

            data_items = list(workbook.get_data_sheet(data_type))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                # Check that type definitions in 'types' sheet vs corresponding columns in the item-sheet MATCH
                item_fields_list = di['field'].keys() if 'field' in di else []
                not_in_sheet, not_in_types = diff_lists(item_fields_list, data_type.fields_without_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(FAILURE_MESSAGES["has_no_field_column"]).format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(FAILURE_MESSAGES["has_extra_column"]).format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that this item has all the properties listed in its 'types' definition
                item_attributes_list = di['property'].keys() if 'property' in di else []
                not_in_sheet, not_in_types = diff_lists(item_attributes_list, data_type.item_attributes)
                if len(not_in_sheet) > 0:
                    error_message = _(FAILURE_MESSAGES["has_no_field_column"]).format(tag=tag, field=not_in_sheet[0])
                    raise ExcelMalformatException(error_message)
                if len(not_in_types) > 0:
                    error_message = _(FAILURE_MESSAGES["has_extra_column"]).format(tag=tag, field=not_in_types[0])
                    raise ExcelMalformatException(error_message)

                # check that properties in 'types' sheet vs item-sheet MATCH
                for field in data_type.fields:
                    if len(field.properties) > 0:
                        sheet_props = di.get(field.field_name, {})
                        sheet_props_list = sheet_props.keys()
                        type_props = field.properties
                        not_in_sheet, not_in_types = diff_lists(sheet_props_list, type_props)
                        if len(not_in_sheet) > 0:
                            error_message = _(FAILURE_MESSAGES["sheet_has_no_property"]).format(
                                tag=tag,
                                property=not_in_sheet[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(error_message)
                        if len(not_in_types) > 0:
                            error_message = _(FAILURE_MESSAGES["sheet_has_extra_property"]).format(
                                tag=tag,
                                property=not_in_types[0],
                                field=field.field_name
                            )
                            raise ExcelMalformatException(error_message)
                        # check that fields with properties are numbered
                        if type(di['field'][field.field_name]) != list:
                            error_message = _(FAILURE_MESSAGES["invalid_field_with_property"]).format(field=field.field_name)
                            raise ExcelMalformatException(error_message)
                        field_prop_len = len(di['field'][field.field_name])
                        for prop in sheet_props:
                            if type(sheet_props[prop]) != list:
                                error_message = _(FAILURE_MESSAGES["invalid_property"]).format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(error_message)
                            if len(sheet_props[prop]) != field_prop_len:
                                error_message = _(FAILURE_MESSAGES["wrong_field_property_combos"]).format(
                                    field=field.field_name,
                                    prop=prop
                                )
                                raise ExcelMalformatException(error_message)

                # excel format check should have been covered by this line. Can make assumptions about data now
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=unicode(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=unicode(field_prop_combos[x]),
                                properties={prop: unicode(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key
                )
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain or not old_data_item.data_type_id == data_type.get_id:
                        # UID belongs to a different domain or table: don't
                        # overwrite it, fall back to creating a fresh item.
                        old_data_item = new_data_item
                        return_val.errors.append(_("'%(UID)s' is not a valid UID. But the new item is created.") % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                # Re-derive ownership entirely from the sheet: clear existing
                # group/user links, then re-add the listed ones.
                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(_("Unknown group: '%(name)s'. But the row is successfully added") % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        # BUG FIX: restored the '%(name)s' placeholder that was
                        # stripped from this message (the mapping below expects it).
                        return_val.errors.append(_("Invalid username: '%(name)s'. Row is not added") % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(_("Unknown user: '%(name)s'. But the row is successfully added") % {'name': raw_username})

    return return_val
Beispiel #42
0
def run_upload_api(request, domain, workbook):
    """Upload fixture types and items from ``workbook`` (legacy API flavor).

    Unlike ``run_upload`` this reports per-row problems through the Django
    ``messages`` framework on ``request`` and returns a plain dict.

    :param request: HTTP request used for ``messages.error`` reporting
    :param domain: domain the fixtures belong to
    :param workbook: parsed workbook with a 'types' sheet plus one data
        sheet per table tag
    :return: dict whose ``number_of_fixtures`` is the count of processed
        'types' rows (``unknown_groups`` / ``unknown_users`` are kept empty
        for API compatibility)
    """
    return_val = {
        "unknown_groups": [],
        "unknown_users": [],
        "number_of_fixtures": 0,
    }
    group_memoizer = GroupMemoizer(domain)

    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr):
        # Surface a missing 'types' column as a readable error.
        try:
            return container[attr]
        except KeyError:
            raise Exception("Workbook 'types' has no column '{attr}'".format(attr=attr))

    number_of_fixtures = -1
    with CouchTransaction() as transaction:
        for number_of_fixtures, dt in enumerate(data_types):
            tag = _get_or_raise(dt, 'tag')
            data_type_results = FixtureDataType.by_domain_tag(domain, tag)
            if len(data_type_results) == 0:
                data_type = FixtureDataType(
                    domain=domain,
                    name=_get_or_raise(dt, 'name'),
                    tag=_get_or_raise(dt, 'tag'),
                    fields=_get_or_raise(dt, 'field'),
                )
                transaction.save(data_type)
            else:
                # reuse an existing table with this tag (last result wins)
                for x in data_type_results:
                    data_type = x

            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=di['field'],
                    sort_key=sort_key
                )
                try:
                    old_data_item = FixtureDataItem.get(di['UID'])
                    assert old_data_item.domain == domain
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    assert old_data_item.data_type_id == data_type.get_id
                    if di.get(DELETE_HEADER) in ("Y", "y"):
                        old_data_item.recursive_delete(transaction)
                        continue
                    old_data_item.fields = di['field']
                    transaction.save(old_data_item)
                except (AttributeError, KeyError, ResourceNotFound,
                        AssertionError):
                    # No usable existing item (missing/foreign UID): create new.
                    old_data_item = new_data_item
                    transaction.save(old_data_item)

                # Re-derive ownership from the sheet: clear, then re-add.
                old_groups = old_data_item.get_groups()
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.get_users()
                for user in old_users:
                    old_data_item.remove_user(user)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, "Unknown group: %s" % group_name)

                for raw_username in di.get('user', []):
                    # BUG FIX: normalize_username raises ValidationError on a
                    # malformed username; report it per-row instead of letting
                    # it abort the whole upload (matches run_upload).
                    try:
                        username = normalize_username(raw_username, domain)
                    except ValidationError:
                        messages.error(request, "Invalid username: %s" % raw_username)
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        messages.error(request, "Unknown user: %s" % raw_username)

    return_val["number_of_fixtures"] = number_of_fixtures + 1
    return return_val
Beispiel #43
0
 def delete(self):
     """Delete the backing ``FixtureDataItem`` document, if one exists.

     A ``None`` ``_fixture_id`` means nothing was ever persisted, so there
     is nothing to remove.
     """
     if self._fixture_id is not None:
         FixtureDataItem.get(self._fixture_id).delete()