def drilldown_map(self):
    """Build the disease -> test drilldown options from fixture data.

    Returns a list of dicts, one per disease:
    ``{'val': '<disease_id>:<uid>', 'text': <disease_name>, 'next': [tests]}``
    where each test dict has the same ``val``/``text`` shape, built from the
    test fixtures whose ``disease_id`` matches the disease.
    """
    disease_type = FixtureDataType.by_domain_tag(self.domain, "diseases").one()
    # Look the "test" data type up once instead of once per disease (the
    # original re-queried it on every loop iteration).
    test_type = FixtureDataType.by_domain_tag(self.domain, "test").one()
    diseases = []
    for d in FixtureDataItem.by_data_type(self.domain, disease_type):
        disease_id = d.fields_without_attributes["disease_id"]
        disease = dict(
            val="%(name)s:%(uid)s" % {'name': disease_id, 'uid': d.get_id},
            text=d.fields_without_attributes["disease_name"],
        )
        tests = []
        test_fixtures = FixtureDataItem.by_field_value(
            self.domain, test_type, "disease_id", disease_id)
        for t in test_fixtures:
            tests.append(dict(
                val="%(name)s:%(uid)s" % {
                    'name': t.fields_without_attributes["test_name"],
                    'uid': t.get_id,
                },
                text=t.fields_without_attributes["visible_test_name"],
            ))
        disease['next'] = tests
        diseases.append(disease)
    return diseases
def update_items(fields_patches, domain, data_type_id, transaction):
    """Apply field-level patches (rename/remove/add) to every item of a type.

    ``fields_patches`` maps a field name to a patch dict:
      - ``{"update": new_name}``  rename the field, keeping its value
      - ``{"remove": ...}``       drop the field from the item
      - ``{"is_new": ...}``       add the field with an empty FieldList
    Fields with no patch are carried over unchanged.  Items are saved
    through ``transaction`` so the caller controls commit/rollback.
    """
    data_items = FixtureDataItem.by_data_type(domain, data_type_id)
    for item in data_items:
        fields = item.fields
        updated_fields = {}
        # Each item consumes its own copy, so pops below don't leak
        # between items.
        patches = deepcopy(fields_patches)
        # Iterate over a snapshot of the keys: the loop body pops from
        # both `fields` and `patches` (mutating a dict while iterating
        # its live keys() view raises RuntimeError on Python 3).
        for old_field in list(fields.keys()):
            patch = patches.pop(old_field, {})
            if not any(patch):
                # No patch for this field: keep it as-is.
                updated_fields[old_field] = fields.pop(old_field)
            if "update" in patch:
                new_field_name = patch["update"]
                updated_fields[new_field_name] = fields.pop(old_field)
            if "remove" in patch:
                # Removed fields are simply not copied into updated_fields.
                continue
        # Whatever remains in `patches` describes brand-new fields.
        for new_field_name in list(patches.keys()):
            patch = patches.pop(new_field_name, {})
            if "is_new" in patch:
                updated_fields[new_field_name] = FieldList(field_list=[])
        item.fields = updated_fields
        transaction.save(item)
    # NOTE: the original ended with a second, unused
    # `FixtureDataItem.by_data_type(...)` query; it had no effect and was
    # removed.
def filter_context(self):
    """Build the template context for the drillable fixture filter.

    Reads ``fixture_id`` ("<type>:<id>") from the querystring; when present,
    walks the selected item's lineage from the root down so each level of
    the hierarchy is pre-populated with its children.
    """
    root_fdis = [self.fdi_to_json(f) for f in FixtureDataItem.by_data_type(
        self.domain, self.data_types(0).get_id)]
    f_id = self.request.GET.get('fixture_id', None)
    selected_fdi_type = f_id.split(':')[0] if f_id else None
    selected_fdi_id = f_id.split(':')[1] if f_id else None
    if selected_fdi_id:
        # (removed an unused `index = 0` local)
        lineage = self.generate_lineage(selected_fdi_type, selected_fdi_id)
        parent = {'children': root_fdis}
        for i, fdi in enumerate(lineage[:-1]):
            # Find this lineage node among the parent's serialized children,
            # then attach the next level's children to it.
            this_fdi = [f for f in parent['children'] if f['id'] == fdi.get_id][0]
            next_h = self.hierarchy[i + 1]
            this_fdi['children'] = [self.fdi_to_json(f) for f in FixtureDataItem.by_field_value(
                self.domain,
                self.data_types(i + 1),
                next_h["parent_ref"],
                fdi.fields_without_attributes[next_h["references"]])]
            parent = this_fdi
    return {
        'api_root': self.api_root,
        'control_name': self.label,
        'control_slug': self.slug,
        'selected_fdi_id': selected_fdi_id,
        'fdis': json.dumps(root_fdis),
        'hierarchy': self.full_hierarchy,
    }
def make_data_item(location_name, cost):
    """Make a fixture data item and assign it to location_name"""
    def _single(value):
        # One-entry FieldList wrapper around a plain value.
        return FieldList(field_list=[
            FixtureItemField(field_value=value, properties={}),
        ])

    data_item = FixtureDataItem(
        domain=cls.domain,
        data_type_id=data_type.get_id,
        fields={
            "cost": _single(cost),
            "location_name": _single(location_name),
        },
        item_attributes={},
    )
    data_item.save()
    ownership = FixtureOwnership(
        domain=cls.domain,
        owner_id=cls.locations[location_name].location_id,
        owner_type='location',
        data_item_id=data_item.get_id,
    )
    ownership.save()
def get_all_providers(invalidate=False):
    """
    wrapper function to get all the providers for PACT and cache them.

    ugly for now - the number of entries is small enough that loading all
    and scanning on checking is small enough overhead on a single page load.
    """
    if invalidate:
        cache.delete(PACT_PROVIDERS_FIXTURE_CACHE_KEY)
    raw_cached_fixtures = cache.get(PACT_PROVIDERS_FIXTURE_CACHE_KEY, None)
    if raw_cached_fixtures is None:
        # requery and cache
        pact_hp_group = Group.by_name(PACT_DOMAIN, PACT_HP_GROUPNAME)
        providers = FixtureDataItem.by_group(pact_hp_group)
        cache.set(PACT_PROVIDERS_FIXTURE_CACHE_KEY,
                  json.dumps([x.to_json() for x in providers]))
        return providers
    try:
        # not necessary in the grand scheme of things - we could really
        # just use raw JSON
        json_data = json.loads(raw_cached_fixtures)
        return [FixtureDataItem.wrap(x) for x in json_data]
    except Exception as ex:
        # `except Exception, ex` was Python-2-only syntax; `as` works on
        # 2.6+ and 3.x.  Corrupt cache contents degrade to "no providers"
        # rather than crashing the page; lazy %-args keep logging cheap.
        logging.error("Error loading json from cache key %s: %s",
                      PACT_PROVIDERS_FIXTURE_CACHE_KEY, ex)
        return []
def generate_lineage(self, leaf_type, leaf_item_id):
    """Return the chain of fixture items from the hierarchy root down to
    the given leaf item (leaf last)."""
    leaf_fdi = FixtureDataItem.get(leaf_item_id)
    index = None
    # Locate the leaf's level, scanning the hierarchy bottom-up.
    for i, h in enumerate(self.hierarchy[::-1]):
        if h["type"] == leaf_type:
            index = i
    if index is None:
        raise Exception(
            "Could not generate lineage for AsyncDrillableFilter due to a nonexistent leaf_type (%s)" % leaf_type)
    lineage = [leaf_fdi]
    for i, h in enumerate(self.full_hierarchy[::-1]):
        # Skip levels below the leaf and the root level itself.
        if i < index or i >= len(self.hierarchy) - 1:
            continue
        real_index = len(self.hierarchy) - (i + 1)
        ancestor = FixtureDataItem.by_field_value(
            self.domain,
            self.data_types(real_index - 1),
            h["references"],
            lineage[0].fields_without_attributes[h["parent_ref"]],
        ).one()
        lineage.insert(0, ancestor)
    return lineage
def get(self, request, *args, **kwargs):
    """Return the district/cbo/clienttype/userpl location hierarchy as JSON."""
    domain = self.kwargs['domain']

    def to_filter_format(data, parent_key=None):
        # Every level starts with an "All" option.
        locations = [dict(id='', text='All')]
        for row in data:
            loc_id = row.fields['id'].field_list[0].field_value
            loc = dict(id=loc_id, text=loc_id)
            if parent_key:
                loc['parent_id'] = row.fields[parent_key].field_list[0].field_value
            locations.append(loc)
        return locations

    districts = FixtureDataItem.get_item_list(domain, 'district')
    cbos = FixtureDataItem.get_item_list(domain, 'cbo')
    clienttypes = FixtureDataItem.get_item_list(domain, 'clienttype')
    userpls = FixtureDataItem.get_item_list(domain, 'userpl')
    hierarchy = {
        'districts': to_filter_format(districts),
        'cbos': to_filter_format(cbos, 'district_id'),
        'clienttypes': to_filter_format(clienttypes, 'cbo_id'),
        'userpls': to_filter_format(userpls, 'clienttype_id'),
    }
    return JsonResponse(data=hierarchy)
def filter_context(self):
    """Build the template context for the drillable fixture filter.

    Reads ``fixture_id`` ("<type>:<id>") from the querystring; when present,
    walks the selected item's lineage from the root down so each level of
    the hierarchy is pre-populated with its children.

    NOTE(review): this variant keys children off ``fdi.fields[...]`` where
    its siblings use ``fields_without_attributes`` — preserved as-is, but
    worth confirming which is intended.
    """
    root_fdis = [self.fdi_to_json(f) for f in FixtureDataItem.by_data_type(
        self.domain, self.data_types(0).get_id)]
    f_id = self.request.GET.get("fixture_id", None)
    selected_fdi_type = f_id.split(":")[0] if f_id else None
    selected_fdi_id = f_id.split(":")[1] if f_id else None
    if selected_fdi_id:
        # (removed an unused `index = 0` local)
        lineage = self.generate_lineage(selected_fdi_type, selected_fdi_id)
        parent = {"children": root_fdis}
        for i, fdi in enumerate(lineage[:-1]):
            this_fdi = [f for f in parent["children"] if f["id"] == fdi.get_id][0]
            next_h = self.hierarchy[i + 1]
            this_fdi["children"] = [
                self.fdi_to_json(f) for f in FixtureDataItem.by_field_value(
                    self.domain,
                    self.data_types(i + 1),
                    next_h["parent_ref"],
                    fdi.fields[next_h["references"]]
                )
            ]
            parent = this_fdi
    return {
        "api_root": self.api_root,
        "control_name": self.label,
        "control_slug": self.slug,
        "selected_fdi_id": selected_fdi_id,
        "fdis": json.dumps(root_fdis),
        "hierarchy": self.full_hierarchy,
    }
def _setup_data_item(self, risk='risk1', sequence='1', message='message1'):
    """Create a message-bank fixture row and register it for cleanup."""
    def _single(value):
        return FieldList(field_list=[
            FixtureItemField(field_value=value, properties={}),
        ])

    data_item = FixtureDataItem(
        domain=self.domain_name,
        data_type_id=self.data_type.get_id,
        fields={
            "risk_profile": _single(risk),
            "sequence": _single(sequence),
            "message": _single(message),
        },
        item_attributes={},
    )
    data_item.save()
    self.addCleanup(data_item.delete)
def download_item_lists(request, domain):
    # Export every fixture data type in the domain — the type definitions
    # plus each type's items with their group/user ownership columns — to an
    # Excel (XLS 2007) download.
    data_types = FixtureDataType.by_domain(domain)
    data_type_schemas = []
    max_fields = 0
    # per-type maxima, reset after each type; mmax_* track the global maxima
    max_groups = 0
    max_users = 0
    mmax_groups = 0
    mmax_users = 0
    data_tables = []
    for data_type in data_types:
        type_schema = [data_type.name, data_type.tag]
        fields = [field for field in data_type.fields]
        type_id = data_type.get_id
        data_table_of_type = []
        # First pass: find the widest group/user membership so every row of
        # this type can be padded to the same column count.
        for item_row in FixtureDataItem.by_data_type(domain, type_id):
            group_len = len(item_row.get_groups())
            max_groups = group_len if group_len>max_groups else max_groups
            user_len = len(item_row.get_users())
            max_users = user_len if user_len>max_users else max_users
        # Second pass: emit one row per item, padded with "" to the maxima.
        for item_row in FixtureDataItem.by_data_type(domain, type_id):
            groups = [group.name for group in item_row.get_groups()] + ["" for x in range(0,max_groups-len(item_row.get_groups()))]
            users = [user.raw_username for user in item_row.get_users()] + ["" for x in range(0, max_users-len(item_row.get_users()))]
            # "N" is the default value for the delete column.
            data_row = tuple([str(_id_from_doc(item_row)),"N"]+ [item_row.fields[field] for field in fields]+ groups + users)
            data_table_of_type.append(data_row)
        type_schema.extend(fields)
        data_type_schemas.append(tuple(type_schema))
        if max_fields<len(type_schema):
            max_fields = len(type_schema)
        data_tables.append((data_type.tag,tuple(data_table_of_type)))
        mmax_users = max_users if max_users>mmax_users else mmax_users
        mmax_groups = max_groups if max_groups>mmax_groups else mmax_groups
        max_users = 0
        max_groups = 0
    # NOTE(review): range(1, max_fields - 1) yields max_fields - 2 headers,
    # which looks one short of the field count — confirm against the
    # importer's expectations.
    type_headers = ["name", "tag"] + ["field %d" % x for x in range(1, max_fields - 1)]
    type_headers = ("types", tuple(type_headers))
    table_headers = [type_headers]
    for type_schema in data_type_schemas:
        item_header = (type_schema[1], tuple(["UID", DELETE_HEADER] +
            ["field: " + x for x in type_schema[2:]] +
            ["group %d" % x for x in range(1, mmax_groups + 1)] +
            ["user %d" % x for x in range(1, mmax_users + 1)]))
        table_headers.append(item_header)
    table_headers = tuple(table_headers)
    type_rows = ("types", tuple(data_type_schemas))
    data_tables = tuple([type_rows]+data_tables)
    # Write the workbook to a temp file, then stream it back as a download.
    fd, path = tempfile.mkstemp()
    with os.fdopen(fd, 'w') as temp:
        export_raw((table_headers), (data_tables), temp)
    format = Format.XLS_2007
    return export_response(open(path), format, "%s_fixtures" % domain)
def generate_lineage(self, leaf_type, leaf_item_id):
    """Return the chain of fixture items from the hierarchy root down to
    the given leaf item (leaf last).

    Raises an explicit Exception when ``leaf_type`` is not in the hierarchy;
    previously this crashed with an UnboundLocalError on ``index`` (the
    sibling implementations of this method already guard for it).
    """
    leaf_fdi = FixtureDataItem.get(leaf_item_id)
    index = None
    # Locate the leaf's level, scanning the hierarchy bottom-up.
    for i, h in enumerate(self.hierarchy[::-1]):
        if h["type"] == leaf_type:
            index = i
    if index is None:
        raise Exception(
            "Could not generate lineage for AsyncDrillableFilter due to a nonexistent leaf_type (%s)" % leaf_type)
    lineage = [leaf_fdi]
    for i, h in enumerate(self.full_hierarchy[::-1]):
        # Skip levels below the leaf and the root level itself.
        if i < index or i >= len(self.hierarchy) - 1:
            continue
        real_index = len(self.hierarchy) - (i + 1)
        lineage.insert(0, FixtureDataItem.by_field_value(
            self.domain,
            self.data_types(real_index - 1),
            h["references"],
            lineage[0].fields[h["parent_ref"]]).one())
    return lineage
def data_types(request, domain, data_type_id):
    """REST-ish view over FixtureDataType: GET/PUT/DELETE a single type,
    or GET/POST the collection when no id is given."""
    if data_type_id:
        data_type = FixtureDataType.get(data_type_id)
        # Sanity checks: right doc type, right domain.
        assert(data_type.doc_type == FixtureDataType._doc_type)
        assert(data_type.domain == domain)
        if request.method == 'GET':
            return json_response(strip_json(data_type))
        if request.method == 'PUT':
            # Wrap the payload, then copy the editable attributes over.
            new = FixtureDataType(domain=domain, **_to_kwargs(request))
            for attr in ('tag', 'name', 'fields'):
                setattr(data_type, attr, getattr(new, attr))
            data_type.save()
            return json_response(strip_json(data_type))
        if request.method == 'DELETE':
            # Items belong to the type, so remove them first.
            for item in FixtureDataItem.by_data_type(domain, data_type.get_id):
                item.delete()
            data_type.delete()
            return json_response({})
    elif data_type_id is None:
        if request.method == 'POST':
            data_type = FixtureDataType(domain=domain, **_to_kwargs(request))
            data_type.save()
            return json_response(strip_json(data_type))
        if request.method == 'GET':
            return json_response(
                [strip_json(x) for x in FixtureDataType.by_domain(domain)])
    return HttpResponseBadRequest()
def get_users_per_dctl(cls):
    """Map each DCTL fixture's 'id' field to the (unwrapped) users owning it."""
    data_type = FixtureDataType.by_domain_tag(cls.domain, 'dctl').first()
    type_id = data_type.get_id if data_type else None
    dctls = {}
    for item in FixtureDataItem.by_data_type(cls.domain, type_id):
        dctls[item.fields_without_attributes.get("id")] = item.get_users(wrap=False)
    return dctls
def obj_update(self, bundle, **kwargs):
    """Update fields and/or item_attributes of an existing lookup table item."""
    if 'data_type_id' not in bundle.data:
        raise BadRequest("data_type_id must be specified")
    try:
        bundle.obj = FixtureDataItem.get(kwargs['pk'])
    except ResourceNotFound:
        raise NotFound('Lookup table item not found')
    if bundle.obj.domain != kwargs['domain']:
        # Treat another domain's item as nonexistent.
        raise NotFound('Lookup table item not found')
    dirty = False
    if 'fields' in bundle.data:
        dirty = True
        bundle.obj.fields = dict(
            (field_name, FieldList.wrap(field_list))
            for field_name, field_list in bundle.data['fields'].items()
        )
    if 'item_attributes' in bundle.data:
        dirty = True
        bundle.obj.item_attributes = bundle.data['item_attributes']
    if dirty:
        bundle.obj.save()
    return bundle
def test_sees_own_fixture_and_parent_fixture(self):
    """The Boston user sees both the Boston item and its parent Suffolk item."""
    fixture_items = FixtureDataItem.by_user(self.boston_user)
    actual = [
        (self._get_value(item, 'cost'), self._get_value(item, 'location_name'))
        for item in fixture_items
    ]
    self.assertItemsEqual(actual, [('8', 'Suffolk'), ('10', 'Boston')])
def item_lists(user, version=V2, last_sync=None):
    """Build the <fixture> XML elements for every item list the user owns.

    Accepts a CommCareUser directly, or any object carrying one on
    ``_hq_user``; anything else yields no fixtures.
    """
    if isinstance(user, CommCareUser):
        pass
    elif hasattr(user, "_hq_user") and user._hq_user is not None:
        user = user._hq_user
    else:
        return []
    items = FixtureDataItem.by_user(user)
    data_types = {}
    items_by_type = defaultdict(list)
    for item in items:
        # `has_key` is deprecated (removed in Python 3); `in` is equivalent.
        if item.data_type_id not in data_types:
            try:
                data_types[item.data_type_id] = FixtureDataType.get(item.data_type_id)
            except (ResourceNotFound, AttributeError):
                # Orphaned item whose data type is gone: skip it.
                continue
        items_by_type[item.data_type_id].append(item)
        # Cache the type on the item so to_xml() avoids another db trip.
        item._data_type = data_types[item.data_type_id]
    fixtures = []
    for data_type in data_types.values():
        xFixture = ElementTree.Element('fixture', attrib={
            'id': 'item-list:%s' % data_type.tag,
            'user_id': user.user_id,
        })
        xItemList = ElementTree.Element('%s_list' % data_type.tag)
        xFixture.append(xItemList)
        for item in items_by_type[data_type.get_id]:
            xItemList.append(item.to_xml())
        fixtures.append(xFixture)
    return fixtures
def setUp(self):
    """Create a data type, one item of it, a user, and the ownership doc
    linking the user to the item."""
    self.domain = 'qwerty'
    self.data_type = FixtureDataType(
        domain=self.domain,
        tag="contact",
        name="Contact",
        fields=['name', 'number'],
    )
    self.data_type.save()
    self.data_item = FixtureDataItem(
        domain=self.domain,
        data_type_id=self.data_type.get_id,
        fields={'name': 'John', 'number': '+15555555555'},
    )
    self.data_item.save()
    self.user = CommCareUser.create(self.domain, 'rudolph', '***')
    self.fixture_ownership = FixtureOwnership(
        domain=self.domain,
        owner_id=self.user.get_id,
        owner_type='user',
        data_item_id=self.data_item.get_id,
    )
    self.fixture_ownership.save()
def _get_global_items(self, global_types, domain, bypass_cache):
    """Collect every item of the global data types, grouped by type, and
    render them as fixtures owned by the global user."""
    grouped = defaultdict(list)
    all_items = FixtureDataItem.by_data_types(domain, global_types, bypass_cache)
    for fixture_item in all_items:
        item_type = global_types[fixture_item.data_type_id]
        # Prime the cached type so later serialization skips a db fetch.
        self._set_cached_type(fixture_item, item_type)
        grouped[item_type].append(fixture_item)
    return self._get_fixtures(global_types, grouped, GLOBAL_USER_ID)
def test_update(self):
    """PUT replaces the item's fields and attributes and returns 204."""
    data_item = self._create_data_item()
    update_payload = {
        "data_type_id": self.data_type._id,
        "fields": {
            "state_name": {
                "field_list": [
                    {"field_value": "Massachusetts", "properties": {"lang": "en"}},
                    {"field_value": "马萨诸塞", "properties": {"lang": "zh"}},
                ]
            }
        },
        "item_attributes": {
            "attribute1": "cool_attr_value",
        },
    }
    response = self._assert_auth_post_resource(
        self.single_endpoint(data_item._id),
        json.dumps(update_payload),
        method="PUT",
    )
    refreshed = FixtureDataItem.get(data_item._id)
    self.assertEqual(response.status_code, 204)
    self.assertEqual(refreshed.data_type_id, self.data_type._id)
    self.assertEqual(len(refreshed.fields), 1)
    state_field = refreshed.fields['state_name'].field_list[0]
    self.assertEqual(state_field.field_value, 'Massachusetts')
    self.assertEqual(state_field.properties, {"lang": "en"})
    self.assertEqual(refreshed.item_attributes, {"attribute1": "cool_attr_value"})
def item_lists(user, version=V2, last_sync=None):
    """Build <fixture> elements for the user: all global item lists plus
    any user-owned lists, with items sorted by sort_key."""
    if isinstance(user, CommCareUser):
        pass
    elif hasattr(user, "_hq_user") and user._hq_user is not None:
        user = user._hq_user
    else:
        return []
    all_types = dict([(t._id, t) for t in FixtureDataType.by_domain(user.domain)])
    global_types = dict([(id, t) for id, t in all_types.items() if t.is_global])
    items_by_type = defaultdict(list)

    def _set_cached_type(item, data_type):
        # set the cached version used by the object so that it doesn't
        # have to do another db trip later
        item._data_type = data_type

    for global_fixture in global_types.values():
        items = list(FixtureDataItem.by_data_type(user.domain, global_fixture))
        # Plain loop instead of a throwaway `_ = [...]` comprehension.
        for item in items:
            _set_cached_type(item, global_fixture)
        items_by_type[global_fixture._id] = items
    other_items = FixtureDataItem.by_user(user)
    data_types = {}
    for item in other_items:
        if item.data_type_id in global_types:
            continue  # was part of the global type so no need to add here
        # `has_key` is deprecated (removed in Python 3); `in` is equivalent.
        if item.data_type_id not in data_types:
            try:
                data_types[item.data_type_id] = all_types[item.data_type_id]
            except (AttributeError, KeyError):
                continue
        items_by_type[item.data_type_id].append(item)
        _set_cached_type(item, data_types[item.data_type_id])
    fixtures = []
    # list() both sides: dict.values() is a view on Python 3 and does not
    # support `+`.
    all_types = list(data_types.values()) + list(global_types.values())
    for data_type in all_types:
        xFixture = ElementTree.Element('fixture', attrib={
            'id': 'item-list:%s' % data_type.tag,
            'user_id': user.user_id,
        })
        xItemList = ElementTree.Element('%s_list' % data_type.tag)
        xFixture.append(xItemList)
        for item in sorted(items_by_type[data_type.get_id], key=lambda x: x.sort_key):
            xItemList.append(item.to_xml())
        fixtures.append(xFixture)
    return fixtures
def province(self):
    """Fixture id of the province whose 'id' field matches this case's province."""
    case = self.care_case
    province_type = FixtureDataType.by_domain_tag("care-ihapc-live", "province").first()
    matching_item = FixtureDataItem.by_field_value(
        "care-ihapc-live", province_type, "id", case.province).first()
    return matching_item._id
def save(self):
    """Persist this org unit as a fixture data item and return the new id.

    Raises Dhis2ConfigurationError when the backing lookup table is missing.
    """
    data_type = FixtureDataType.by_domain_tag(self.objects.domain, self.objects.tag).one()
    if data_type is None:
        raise Dhis2ConfigurationError(
            'Unable to find lookup table in domain "%s" with ID "%s".' % (
                self.objects.domain, self.objects.tag))
    item_fields = {
        'id': to_field_list(self.id),
        'name': to_field_list(self.name),
        'parent_id': to_field_list(self.parent_id),
    }
    data_item = FixtureDataItem(
        data_type_id=data_type.get_id,
        domain=self.objects.domain,
        fields=item_fields,
    )
    data_item.save()
    self._fixture_id = data_item.get_id
    return self._fixture_id
def get_user_site_map(domain):
    """Map each owning user's id to the 'site_id' fields of their site fixtures."""
    user_site_map = defaultdict(list)
    data_type = FixtureDataType.by_domain_tag(domain, 'site').first()
    for fixture in FixtureDataItem.by_data_type(domain, data_type.get_id):
        site_id = fixture.fields['site_id']
        for user in fixture.get_users():
            user_site_map[user._id].append(site_id)
    return user_site_map
def diseases(self):
    """Ids and display names of every disease fixture in the domain."""
    disease_type = FixtureDataType.by_domain_tag(self.domain, "diseases").one()
    disease_fixtures = FixtureDataItem.by_data_type(self.domain, disease_type)
    return {
        "ids": [fixture.fields["disease_id"] for fixture in disease_fixtures],
        "names": [fixture.fields["disease_name"] for fixture in disease_fixtures],
    }
def obj_delete(self, bundle, **kwargs):
    """Delete a lookup table item (and its related docs) in one transaction."""
    try:
        data_item = FixtureDataItem.get(kwargs['pk'])
    except ResourceNotFound:
        raise NotFound('Lookup table item not found')
    # Match obj_update's domain check: never delete (or reveal the
    # existence of) another domain's item.
    if data_item.domain != kwargs['domain']:
        raise NotFound('Lookup table item not found')
    with CouchTransaction() as transaction:
        data_item.recursive_delete(transaction)
    return ImmediateHttpResponse(response=HttpAccepted())
def _run_upload(self, request, workbook):
    # Import an excel workbook of fixture types and their items inside a
    # single CouchTransaction, so a failure rolls the whole upload back.
    group_memoizer = GroupMemoizer(self.domain)
    data_types = workbook.get_worksheet(title='types')

    def _get_or_raise(container, attr, message):
        # Read a required column, turning a KeyError into a user-facing
        # error naming the missing column.
        try:
            return container[attr]
        except KeyError:
            raise Exception(message.format(attr=attr))

    with CouchTransaction() as transaction:
        for dt in data_types:
            err_msg = "Workbook 'types' has no column '{attr}'"
            data_type = FixtureDataType(
                domain=self.domain,
                name=_get_or_raise(dt, 'name', err_msg),
                tag=_get_or_raise(dt, 'tag', err_msg),
                fields=_get_or_raise(dt, 'field', err_msg),
            )
            transaction.save(data_type)
            # Each type has a worksheet of its own, named by its tag;
            # row order becomes the item sort order.
            data_items = workbook.get_worksheet(data_type.tag)
            for sort_key, di in enumerate(data_items):
                data_item = FixtureDataItem(
                    domain=self.domain,
                    data_type_id=data_type.get_id,
                    fields=di['field'],
                    sort_key=sort_key
                )
                transaction.save(data_item)
                # Unknown groups/users are reported but do not abort the
                # upload.
                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        data_item.add_group(group, transaction=transaction)
                    else:
                        messages.error(request, "Unknown group: %s" % group_name)
                for raw_username in di.get('user', []):
                    username = normalize_username(raw_username, self.domain)
                    user = CommCareUser.get_by_username(username)
                    if user:
                        data_item.add_user(user)
                    else:
                        messages.error(request, "Unknown user: %s" % raw_username)
        # Replace any pre-existing type with the same tag: delete the old
        # copy (and, recursively, its items) within this same transaction.
        for data_type in transaction.preview_save(cls=FixtureDataType):
            for duplicate in FixtureDataType.by_domain_tag(domain=self.domain,
                                                           tag=data_type.tag):
                duplicate.recursive_delete(transaction)
def user_to_dctl(self, user):
    """Return (dctl_id, dctl_name) from the user's DCTL fixture item,
    defaulting to (None, "Unknown DCTL") when no item matches."""
    dctl_id = None
    dctl_name = "Unknown DCTL"
    for item in FixtureDataItem.by_user(user, domain=self.domain):
        if item.data_type_id == self.dctl_fixture:
            dctl_id = item.fields_without_attributes.get("id")
            dctl_name = item.fields_without_attributes.get("name", dctl_name)
    return dctl_id, dctl_name
def _create_data_item(self, cleanup=True):
    """Create a bilingual state-name item; optionally register cleanup."""
    state_name = FieldList.wrap({
        "field_list": [
            {"field_value": "Tennessee", "properties": {"lang": "en"}},
            {"field_value": "田納西", "properties": {"lang": "zh"}},
        ],
    })
    data_item = FixtureDataItem(
        domain=self.domain.name,
        data_type_id=self.data_type._id,
        fields={"state_name": state_name},
        item_attributes={},
        sort_key=1,
    )
    data_item.save()
    if cleanup:
        self.addCleanup(data_item.delete)
    return data_item
def user_to_dctl(self, user):
    """Return (dctl_id, dctl_name) from the user's DCTL fixture item,
    defaulting to (None, "Unknown DCTL") when no item matches."""
    dctl_id = None
    dctl_name = "Unknown DCTL"
    for item in FixtureDataItem.by_user(user, domain=self.domain):
        if item.data_type_id == self.dctl_fixture:
            dctl_id = item.fields.get('id')
            dctl_name = item.fields.get('name', dctl_name)
    return dctl_id, dctl_name
def get_fixture_items_for_data_types(domain, data_type_ids, bypass_cache=False):
    """Fetch all fixture data items for the given data type ids in one view query."""
    from corehq.apps.fixtures.models import FixtureDataItem
    view_keys = [[domain, type_id] for type_id in data_type_ids]
    results = FixtureDataItem.view(
        'fixtures/data_items_by_domain_type',
        keys=view_keys,
        reduce=False,
        include_docs=True,
        descending=True,
    )
    return list(results)
def get_fixture_items_for_data_type(domain, data_type_id, bypass_cache=False):
    """Fetch every fixture data item of a single data type via the couch view."""
    from corehq.apps.fixtures.models import FixtureDataItem
    results = FixtureDataItem.view(
        'fixtures/data_items_by_domain_type',
        startkey=[domain, data_type_id],
        # {} sorts after any scalar, so this endkey spans the whole type.
        endkey=[domain, data_type_id, {}],
        reduce=False,
        include_docs=True,
    )
    return list(results)
def _make_data_item(cls, sort_key, name, seat, sigil):
    """Create and save a house fixture item with name/seat/sigil fields."""
    def _wrap(value):
        return FieldList(field_list=[
            FixtureItemField(field_value=value, properties={})
        ])

    data_item = FixtureDataItem(
        domain=cls.domain,
        data_type_id=cls.data_type._id,
        fields={
            "name": _wrap(name),
            "seat": _wrap(seat),
            "sigil": _wrap(sigil),
        },
        item_attributes={},
        sort_key=sort_key,
    )
    data_item.save()
    return data_item
def delete_unneeded_fixture_data_item(self, domain, data_type_id):
    """Deletes all fixture data items and their ownership models based on their data type.

    Note that this does not bust any caches meaning that the data items could
    still be returned to the user for some time
    """
    item_ids = []
    try:
        # Delete items in batches of 1000, remembering their ids so the
        # corresponding ownership docs can be removed afterwards.
        for items in chunked(FixtureDataItem.by_data_type(domain, data_type_id), 1000):
            FixtureDataItem.delete_docs(items)
            item_ids.extend([item.get_id for item in items])
        for item_id_chunk in chunked(item_ids, 1000):
            for docs in chunked(FixtureOwnership.for_all_item_ids(item_id_chunk, domain), 1000):
                FixtureOwnership.delete_docs(docs)
    except (KeyboardInterrupt, SystemExit):
        # Never swallow shutdown signals into a task retry.
        raise
    except Exception as exc:
        # there's no base exception in couchdbkit to catch, so must use Exception
        self.retry(exc=exc)
def get_fixture_items_for_data_types(domain, data_type_ids, bypass_cache=False):
    """Fetch all fixture data items for the given data type ids in one view query."""
    from corehq.apps.fixtures.models import FixtureDataItem
    view_keys = [[domain, type_id] for type_id in data_type_ids]
    query = FixtureDataItem.view(
        'fixtures/data_items_by_domain_type',
        keys=view_keys,
        reduce=False,
        include_docs=True,
        descending=True,
    )
    return list(query)
def _get_message_bank_content(fixture_name, domain, schedule_iteration_num, current_event_num, current_event_num_unused=None, num_events=None, recipient=None):
    # NOTE: signature reformatted only in appearance; parameters unchanged.
    pass
def update_fixture(domain_link, tag):
    # Sync one global lookup table from the master domain into the linked
    # domain: upsert the data type, wipe and re-create the linked domain's
    # items from the master's, then clear fixture caches.
    if domain_link.is_remote:
        master_results = remote_fixture(domain_link, tag)
    else:
        master_results = local_fixture(domain_link.master_domain, tag)
    master_data_type = master_results["data_type"]
    if not master_data_type.is_global:
        # Only global tables may be linked across domains.
        raise UnsupportedActionError(
            _("Found non-global lookup table '{}'.").format(
                master_data_type.tag))

    # Update data type
    master_data_type = master_data_type.to_json()
    # Drop the couch identity so the doc can be re-homed in the linked domain.
    del master_data_type["_id"]
    del master_data_type["_rev"]
    linked_data_type = get_fixture_data_type_by_tag(domain_link.linked_domain,
                                                    master_data_type["tag"])
    if linked_data_type:
        # Existing linked copy: keep its identity, overwrite its content.
        linked_data_type = linked_data_type.to_json()
    else:
        linked_data_type = {}
    linked_data_type.update(master_data_type)
    linked_data_type["domain"] = domain_link.linked_domain
    linked_data_type = FixtureDataType.wrap(linked_data_type)
    linked_data_type.save()
    clear_fixture_quickcache(domain_link.linked_domain, [linked_data_type])

    # Re-create relevant data items
    delete_fixture_items_for_data_type(domain_link.linked_domain,
                                       linked_data_type._id)
    for master_item in master_results["data_items"]:
        doc = master_item.to_json()
        del doc["_id"]
        del doc["_rev"]
        doc["domain"] = domain_link.linked_domain
        doc["data_type_id"] = linked_data_type._id
        FixtureDataItem.wrap(doc).save()
    clear_fixture_cache(domain_link.linked_domain)
def copy_fixtures(self):
    """Copy every fixture data type (and all its items) into the new domain."""
    from corehq.apps.fixtures.models import FixtureDataItem
    from corehq.apps.fixtures.dbaccessors import get_fixture_data_types_in_domain

    for fixture_type in get_fixture_data_types_in_domain(self.existing_domain):
        old_id, new_id = self.save_couch_copy(fixture_type, self.new_domain)
        # Re-point each copied item at the newly saved type before copying.
        for item in FixtureDataItem.by_data_type(self.existing_domain, old_id):
            item.data_type_id = new_id
            self.save_couch_copy(item, self.new_domain)
def generate_lineage(self, leaf_type, leaf_item_id):
    """Return the chain of fixture items from the hierarchy root down to
    the given leaf item (leaf last)."""
    leaf_fdi = FixtureDataItem.get(leaf_item_id)
    index = None
    # Find the leaf's level by scanning the hierarchy bottom-up.
    for i, h in enumerate(self.hierarchy[::-1]):
        if h["type"] == leaf_type:
            index = i
    if index is None:
        raise Exception(
            "Could not generate lineage for AsyncDrillableFilter due to a nonexistent leaf_type (%s)" % leaf_type)
    lineage = [leaf_fdi]
    for i, h in enumerate(self.full_hierarchy[::-1]):
        # Skip levels below the leaf and the root level itself.
        if i < index or i >= len(self.hierarchy) - 1:
            continue
        real_index = len(self.hierarchy) - (i + 1)
        ancestor = FixtureDataItem.by_field_value(
            self.domain,
            self.data_types(real_index - 1),
            h["references"],
            lineage[0].fields_without_attributes[h["parent_ref"]],
        ).one()
        lineage.insert(0, ancestor)
    return lineage
def __call__(self, restore_user, version, last_sync=None, app=None):
    """Generate fixture elements for an OTA restore: all global item lists
    plus the user's own lists, and an empty element for every other type."""
    assert isinstance(restore_user, OTARestoreUser)
    all_types = dict([
        (t._id, t) for t in FixtureDataType.by_domain(restore_user.domain)
    ])
    global_types = dict([(id, t) for id, t in all_types.items() if t.is_global])
    items_by_type = defaultdict(list)

    def _set_cached_type(item, data_type):
        # set the cached version used by the object so that it doesn't
        # have to do another db trip later
        item._data_type = data_type

    for global_fixture in global_types.values():
        items = list(
            FixtureDataItem.by_data_type(restore_user.domain, global_fixture))
        # Plain loop instead of a throwaway `_ = [...]` comprehension.
        for item in items:
            _set_cached_type(item, global_fixture)
        items_by_type[global_fixture._id] = items

    other_items = restore_user.get_fixture_data_items()
    data_types = {}
    for item in other_items:
        if item.data_type_id in global_types:
            continue  # was part of the global type so no need to add here
        if item.data_type_id not in data_types:
            try:
                data_types[item.data_type_id] = all_types[item.data_type_id]
            except (AttributeError, KeyError):
                continue
        items_by_type[item.data_type_id].append(item)
        _set_cached_type(item, data_types[item.data_type_id])

    fixtures = []
    # list() both sides: dict.values() is a view on Python 3 and does not
    # support `+`.
    all_types_to_sync = list(data_types.values()) + list(global_types.values())
    for data_type in all_types_to_sync:
        fixtures.append(
            self._get_fixture_element(
                data_type.tag,
                restore_user.user_id,
                sorted(items_by_type[data_type.get_id], key=lambda x: x.sort_key)))
    # .items() instead of py2-only .iteritems(): works on both 2 and 3.
    for data_type_id, data_type in all_types.items():
        if data_type_id not in global_types and data_type_id not in data_types:
            # Types the user has no items for still get an (empty) fixture.
            fixtures.append(
                self._get_fixture_element(data_type.tag, restore_user.user_id, []))
    return fixtures
def _setup_data_item(self, risk='risk1', sequence='1', message='message1'):
    """Create a message-bank fixture row and register it for cleanup."""
    def _one_field(value):
        return FieldList(field_list=[
            FixtureItemField(field_value=value, properties={})
        ])

    data_item = FixtureDataItem(
        domain=self.domain_name,
        data_type_id=self.data_type.get_id,
        fields={
            "risk_profile": _one_field(risk),
            "sequence": _one_field(sequence),
            "message": _one_field(message),
        },
        item_attributes={},
    )
    data_item.save()
    self.addCleanup(data_item.delete)
def obj_get_list(self, bundle, **kwargs):
    """List lookup table items, optionally filtered by a parent reference
    or by fixture type (id or tag); otherwise the whole domain."""
    domain = kwargs['domain']
    params = bundle.request.GET
    parent_id = params.get("parent_id", None)
    parent_ref_name = params.get("parent_ref_name", None)
    references = params.get("references", None)
    child_type = params.get("child_type", None)
    type_id = params.get("fixture_type_id", None)
    type_tag = params.get("fixture_type", None)

    if parent_id and parent_ref_name and child_type and references:
        # Children of a given parent item, matched on a reference field.
        parent_fdi = FixtureDataItem.get(parent_id)
        fdis = list(FixtureDataItem.by_field_value(
            domain, child_type, parent_ref_name,
            parent_fdi.fields_without_attributes[references]))
    elif type_id or type_tag:
        # All items of one type; resolve a tag to its type when needed.
        type_id = type_id or FixtureDataType.by_domain_tag(domain, type_tag).one()
        fdis = list(FixtureDataItem.by_data_type(domain, type_id))
    else:
        fdis = list(FixtureDataItem.by_domain(domain))
    return [convert_fdt(fdi) for fdi in fdis] or []
def make_item_lists(tag, item_name):
    """Create a global 'Provinces' data type under ``tag`` plus a single
    item named ``item_name``; returns (data_type, data_item)."""
    data_type = FixtureDataType(
        domain=DOMAIN,
        tag=tag,
        name="Provinces",
        fields=[FixtureTypeField(field_name="name", properties=[])],
        item_attributes=[],
        is_global=True,
    )
    data_type.save()

    name_field = FieldList(
        field_list=[FixtureItemField(field_value=item_name, properties={})])
    data_item = FixtureDataItem(
        domain=DOMAIN,
        data_type_id=data_type.get_id,
        fields={"name": name_field},
        item_attributes={},
    )
    data_item.save()
    return data_type, data_item
def report_subtitles(self):
    """Lines describing the active filters: date range, location, disease,
    and test version (only those that are set)."""
    if self.needs_filters:
        return []
    subtitles = ["Date range: %s" % self.daterange_display]
    if self.selected_fixture():
        tag, id = self.selected_fixture()
        location = FixtureDataItem.get(id).fields_without_attributes[
            '%s_name' % tag]
        subtitles.append('Location: %s' % location)
    if self.disease:
        # (renamed from the misleading reuse of `location`)
        disease_name = FixtureDataItem.get(
            self.disease[1]).fields_without_attributes['disease_name']
        subtitles.append('Disease: %s' % disease_name)
    if self.test_version:
        test_version = FixtureDataItem.get(
            self.test_version[1]).fields_without_attributes['visible_test_name']
        subtitles.append('Test Version: %s' % test_version)
    return subtitles
def diseases(self):
    """Ids and display names of every disease fixture in the domain."""
    disease_type = FixtureDataType.by_domain_tag(self.domain, "diseases").one()
    disease_fixtures = FixtureDataItem.by_data_type(self.domain, disease_type)
    return {
        "ids": [f.fields_without_attributes["disease_id"]
                for f in disease_fixtures],
        "names": [f.fields_without_attributes["disease_name"]
                  for f in disease_fixtures],
    }
def update_items(fields_patches, domain, data_type_id, transaction):
    """Apply field-level patches to every data item of the given type.

    Each entry in *fields_patches* maps a field name to one of:
    - {} (empty patch): keep the field unchanged
    - {"update": new_name}: rename the field to new_name
    - {"remove": ...}: drop the field from the item
    - {"is_new": ...}: add a new, empty field of that name

    Every item is rewritten and saved through *transaction*.
    """
    data_items = FixtureDataItem.by_data_type(domain, data_type_id)
    for item in data_items:
        fields = item.fields
        updated_fields = {}
        patches = deepcopy(fields_patches)
        # list() because both dicts are mutated (pop) while looping.
        for old_field in list(fields.keys()):
            patch = patches.pop(old_field, {})
            if not any(patch):
                # No patch for this field: carry it over unchanged.
                updated_fields[old_field] = fields.pop(old_field)
            if "update" in patch:
                new_field_name = patch["update"]
                updated_fields[new_field_name] = fields.pop(old_field)
            if "remove" in patch:
                # Removed fields are simply not copied into updated_fields.
                continue
                # destroy_field(field_to_delete, transaction)
        # Patches left over at this point describe brand-new fields.
        for new_field_name in list(patches.keys()):
            patch = patches.pop(new_field_name, {})
            if "is_new" in patch:
                updated_fields[new_field_name] = FieldList(field_list=[])
        item.fields = updated_fields
        transaction.save(item)
    # Re-query after the writes — presumably to refresh the (stale) couch
    # view; the result itself is unused. TODO confirm this is still needed.
    data_items = FixtureDataItem.by_data_type(domain, data_type_id)
def _process_data_item(domain, replace, data_type, di, item_fields, item_attributes, sort_key):
    """Processes a FixtureDataItem from its excel upload.

    Returns a tuple with
    - (unsaved) FixtureDataItem instance
    - boolean flag to indicate if the data item was deleted
    - a list of errors
    """
    delete = False
    errors = []
    new_data_item = FixtureDataItem(
        domain=domain,
        data_type_id=data_type.get_id,
        fields=item_fields,
        item_attributes=item_attributes,
        sort_key=sort_key
    )
    try:
        if di['UID'] and not replace:
            # Re-use the existing item, but overwrite its content below.
            old_data_item = FixtureDataItem.get(di['UID'])
        else:
            old_data_item = new_data_item
        old_data_item.fields = item_fields
        old_data_item.item_attributes = item_attributes
        if (old_data_item.domain != domain
                or old_data_item.data_type_id != data_type.get_id):
            # The UID pointed at an item from another domain/type: fall
            # back to a fresh item and report the bad UID.
            old_data_item = new_data_item
            errors.append(
                _("'%(UID)s' is not a valid UID. But the new item is created.")
                % {'UID': di['UID']}
            )
        if di[DELETE_HEADER] in ("Y", "y"):
            delete = True
    except (ResourceNotFound, KeyError):
        # Unknown UID or missing column: treat the row as a new item.
        old_data_item = new_data_item
    return old_data_item, delete, errors
def filter_context(self):
    """Assemble the context for the fixture-hierarchy drilldown filter.

    Expands the hierarchy along the lineage of the currently selected
    fixture item (from the 'fixture_id' GET param, "<type>:<id>") so
    each ancestor level has its children pre-loaded for display.
    """
    root_fdis = [
        self.fdi_to_json(f)
        for f in FixtureDataItem.by_data_type(self.domain, self.data_types(0).get_id)
    ]
    f_id = self.request.GET.get('fixture_id', None)
    selected_fdi_type = f_id.split(':')[0] if f_id else None
    selected_fdi_id = f_id.split(':')[1] if f_id else None
    if selected_fdi_id:
        # (removed unused local `index = 0`)
        lineage = self.generate_lineage(selected_fdi_type, selected_fdi_id)
        parent = {'children': root_fdis}
        # Walk down the lineage (excluding the leaf), loading each
        # generation's children into the json tree as we go.
        for i, fdi in enumerate(lineage[:-1]):
            this_fdi = [
                f for f in parent['children']
                if f['id'] == fdi.get_id
            ][0]
            next_h = self.hierarchy[i + 1]
            this_fdi['children'] = [
                self.fdi_to_json(f)
                for f in FixtureDataItem.by_field_value(
                    self.domain,
                    self.data_types(i + 1),
                    next_h["parent_ref"],
                    fdi.fields_without_attributes[next_h["references"]])
            ]
            parent = this_fdi
    return {
        'api_root': self.api_root,
        'control_name': self.label,
        'control_slug': self.slug,
        'selected_fdi_id': selected_fdi_id,
        'fdis': json.dumps(root_fdis),
        'hierarchy': self.full_hierarchy
    }
def copy_fixtures(self):
    """Copy all fixture data types and their items into the new domain."""
    from corehq.apps.fixtures.models import FixtureDataItem
    from corehq.apps.fixtures.dbaccessors import get_fixture_data_types_in_domain

    for data_type in get_fixture_data_types_in_domain(self.existing_domain):
        old_id, new_id = self.save_couch_copy(data_type, self.new_domain)
        for data_item in FixtureDataItem.by_data_type(self.existing_domain, old_id):
            # Re-point each item at the copied data type before saving.
            data_item.data_type_id = new_id
            self.save_couch_copy(data_item, self.new_domain)

    # TODO: FixtureOwnership - requires copying users & groups

    calendar_settings = CalendarFixtureSettings.for_domain(self.existing_domain)
    self.save_sql_copy(calendar_settings, self.new_domain)
def _get_fixture_element(self, data_type, user_id, items): attrib = {'id': ':'.join((self.id, data_type.tag)), 'user_id': user_id} if data_type.is_indexed: attrib['indexed'] = 'true' fixture_element = ElementTree.Element('fixture', attrib) item_list_element = ElementTree.Element('%s_list' % data_type.tag) fixture_element.append(item_list_element) for item in items: try: xml = self.to_xml(item) except KeyError: # catch docs missed in prior lazy migrations xml = self.to_xml(FixtureDataItem.wrap(item).to_json()) item_list_element.append(xml) return fixture_element
def config(self):
    """Report config dict; resolves the selected fixture into a location name."""
    location_name = None
    location_key = 'loc'
    fixture = self.request_params.get('fixture_id')
    if fixture:
        # fixture_id is formatted "<type>:<item id>".
        location_key, item_id = fixture.split(":")
        location_name = FixtureDataItem.get(item_id).fields['name']['field_list'][0]['field_value']
    return {
        'domain': self.domain,
        'startdate': self.datespan.startdate_param_utc,
        'enddate': self.datespan.enddate_param_utc,
        location_key: location_name,
        'one': 1,
        'zero': 0,
        'not': -1,
        'empty': ''
    }
def getFacilities(cls, domain=None):
    """Build a nested region -> district -> site mapping from 'site' fixtures.

    Side effect: caches the resolved domain on the class (as before).
    """
    cls.domain = domain or cls.domain
    facilities = {}
    site_type = FixtureDataType.by_domain_tag(cls.domain, 'site').first()
    for fixture in FixtureDataItem.by_data_type(cls.domain, site_type.get_id):
        fields = fixture.fields_without_attributes
        region = fields.get("region_id")
        district = fields.get("district_id")
        site = fields.get("site_number")
        if region not in facilities:
            facilities[region] = dict(name=fields.get("region_name"), districts=dict())
        districts = facilities[region]["districts"]
        if district not in districts:
            districts[district] = dict(name=fields.get("district_name"), sites=dict())
        sites = districts[district]["sites"]
        if site not in sites:
            sites[site] = dict(name=fields.get("site_name"))
    return facilities
def __call__(self, restore_state):
    """Generate the fixture elements for a restore payload.

    Global fixture types are loaded directly from couch; user-owned items
    are queried only when non-global types exist for the domain. Output
    is ordered by data type tag for stable payloads.
    """
    restore_user = restore_state.restore_user
    # All fixture data types in the domain, keyed by doc id.
    all_types = {
        t._id: t
        for t in FixtureDataType.by_domain(restore_user.domain)
    }
    global_types = {id: t for id, t in all_types.items() if t.is_global}
    items_by_type = defaultdict(list)

    def _set_cached_type(item, data_type):
        # set the cached version used by the object so that it doesn't
        # have to do another db trip later
        item._data_type = data_type

    # Global types: every restore gets all of their items.
    for global_fixture in global_types.values():
        items = FixtureDataItem.by_data_type(restore_user.domain, global_fixture)
        _ = [_set_cached_type(item, global_fixture) for item in items]
        items_by_type[global_fixture._id] = items

    if set(all_types) - set(global_types):
        # only query ownership models if there are non-global types
        other_items = restore_user.get_fixture_data_items()
        for item in other_items:
            if item.data_type_id in global_types:
                continue  # was part of the global type so no need to add here
            try:
                _set_cached_type(item, all_types[item.data_type_id])
            except (AttributeError, KeyError):
                # Item references a type we didn't load; skip it.
                continue
            items_by_type[item.data_type_id].append(item)

    fixtures = []
    # NOTE: Python 2 tuple-unpacking lambda (invalid in Python 3).
    types_sorted_by_tag = sorted(all_types.iteritems(), key=lambda (id_, type_): type_.tag)
    for data_type_id, data_type in types_sorted_by_tag:
        if data_type.is_indexed:
            # Indexed types also emit a schema element before the data.
            fixtures.append(self._get_schema_element(data_type))
        items = sorted(items_by_type.get(data_type_id, []),
                       key=lambda x: x.sort_key)
        fixtures.append(
            self._get_fixture_element(data_type, restore_user.user_id, items))
    return fixtures
def __init__(self, request, base_context=None, domain=None, **kwargs):
    """Set up the MC report: resolve the selected fixture, build the data provider."""
    super(MCBase, self).__init__(request, base_context, domain, **kwargs)
    assert self.SECTIONS is not None
    assert self.format_class is not None

    fixture_type = None
    fixture_item = None
    selected = self.request_params.get('fixture_id', None)
    if selected:
        # fixture_id is formatted "<type tag>:<item id>".
        type_tag, item_id = selected.split(":")
        fixture_type = FixtureDataType.by_domain_tag(domain, type_tag).one()
        fixture_item = FixtureDataItem.get(item_id)

    sqldata = McSqlData(self.SECTIONS, self.format_class, domain,
                        self.datespan, fixture_type, fixture_item)
    self.data_provider = MCSectionedDataProvider(sqldata)
def _get_facilities(cls, domain=None):
    """Group 'site' fixture items into IHF and CHF facility lists."""
    domain = domain or cls.domain
    facilities = {'ihf': [], 'chf': []}
    site_type = FixtureDataType.by_domain_tag(domain, 'site').first()
    for item in FixtureDataItem.by_data_type(domain, site_type.get_id):
        fields = item.fields_without_attributes
        category = fields.get("ihf_chf", "").lower()
        if category == 'ifh':  # typo in some test data
            category = 'ihf'
        if category in facilities:
            facilities[category].append(fields)
        # else: site fixture item without an IHF/CHF value - skip it
    return facilities
def test_row_addition(self):
    # upload and then reupload with addition of a new fixture-item should create new items
    first_upload = self.make_rows([(None, 'N', 'apple')])
    workbook = self._get_workbook_from_data(self.headers, first_upload)
    _run_fixture_upload(self.domain, workbook)
    self.assertListEqual(self.get_fixture_items('name'), ['apple'])

    # reupload with additional row
    apple_id = FixtureDataItem.get_item_list(self.domain, 'things')[0]._id
    second_upload = self.make_rows([
        (apple_id, 'N', 'apple'),
        (None, 'N', 'orange'),
    ])
    workbook = self._get_workbook_from_data(self.headers, second_upload)
    _run_fixture_upload(self.domain, workbook)
    self.assertItemsEqual(self.get_fixture_items('name'), ['apple', 'orange'])
def test_update(self):
    """PUT to the single-item endpoint should replace fields and attributes."""
    data_item = self._create_data_item()
    payload = {
        "data_type_id": self.data_type._id,
        "fields": {
            "state_name": {
                "field_list": [
                    {"field_value": "Massachusetts", "properties": {"lang": "en"}},
                    {"field_value": "马萨诸塞", "properties": {"lang": "zh"}},
                ]
            }
        },
        "item_attributes": {
            "attribute1": "cool_attr_value",
        },
    }
    response = self._assert_auth_post_resource(
        self.single_endpoint(data_item._id), json.dumps(payload), method="PUT")
    self.assertEqual(response.status_code, 204)

    updated = FixtureDataItem.get(data_item._id)
    self.assertEqual(updated.data_type_id, self.data_type._id)
    self.assertEqual(len(updated.fields), 1)
    first_field = updated.fields['state_name'].field_list[0]
    self.assertEqual(first_field.field_value, 'Massachusetts')
    self.assertEqual(first_field.properties, {"lang": "en"})
    self.assertEqual(updated.item_attributes, {"attribute1": "cool_attr_value"})
def selected_tests(self):
    """Resolve the test-type GET filters into a list of test names."""
    disease_param = self.request.GET.get('test_type_disease', '')
    test_param = self.request.GET.get('test_type_test', '')

    # Both params are "value:uid" pairs; split keeps the value at [0].
    selected_test = test_param.split(':') if test_param else None
    selected_disease = disease_param.split(':') if disease_param else None

    if selected_test:
        return [selected_test[0]]
    if selected_disease:
        test_fixtures = FixtureDataItem.by_field_value(
            self.domain,
            FixtureDataType.by_domain_tag(self.domain, "test").one(),
            "disease_id", selected_disease[0])
        return [
            t.fields_without_attributes["test_name"]
            for t in test_fixtures
        ]
    return self.test_types
def test_fixture_data_item_to_dict():
    """The dict form should flatten each field to its first field_value."""
    name_translations = [
        ('John', 'en'),
        ('Jan', 'nld'),
        ('Jean', 'fra'),
    ]
    data_item = FixtureDataItem(
        domain='test-domain',
        data_type_id='123456',
        fields={
            'id': FieldList(
                doc_type='FieldList',
                field_list=[
                    FixtureItemField(
                        doc_type='FixtureItemField',
                        field_value='789abc',
                        properties={},
                    ),
                ]
            ),
            'name': FieldList(
                doc_type='FieldList',
                field_list=[
                    FixtureItemField(
                        doc_type='FixtureItemField',
                        field_value=value,
                        properties={'lang': lang},
                    )
                    for value, lang in name_translations
                ]
            ),
        }
    )
    dict_ = fixture_data_item_to_dict(data_item)
    assert_equal(dict_, {
        'id': '789abc',
        'name': 'John'
    })
def data_item_users(request, domain, data_type_id, data_item_id, user_id):
    """Add (POST) or remove (DELETE) a user from a fixture data item."""
    data_type = FixtureDataType.get(data_type_id)
    data_item = FixtureDataItem.get(data_item_id)
    user = CommCareUser.get(user_id)

    # Sanity checks: all three docs must be consistent and in this domain.
    assert data_type.doc_type == FixtureDataType._doc_type
    assert data_type.domain == domain
    assert data_item.doc_type == FixtureDataItem._doc_type
    assert data_item.domain == domain
    assert data_item.data_type_id == data_type_id
    assert user.doc_type == CommCareUser._doc_type
    assert user.domain == domain

    if request.method == 'POST':
        data_item.add_user(user)
        return json_response({})
    if request.method == 'DELETE':
        data_item.remove_user(user)
        return json_response({})
    return HttpResponseBadRequest()
def _import_fixtures(domain):
    """Create the four nutrition fixture types and bulk-load their rows from CSV."""
    fixture_files = [
        ('recipes', 'recipes.csv'),
        ('conv_factors', 'conv_factors.csv'),
        ('food_list', 'food_list.csv'),
        ('food_composition_table', 'food_composition_table.csv'),
    ]
    for fixture_name, filename in fixture_files:
        fields, rows = _read_csv(filename)
        data_type = FixtureDataType(
            domain=domain,
            tag=fixture_name,
            fields=[FixtureTypeField(field_name=field) for field in fields],
        )
        data_type.save()
        # Bulk-save the items in chunks to limit couch round-trips.
        with IterDB(FixtureDataItem.get_db(), chunksize=1000) as iter_db:
            for vals in rows:
                iter_db.save(_mk_fixture_data_item(
                    domain, data_type._id, fields, vals))