def main():
    """Assign organisation units to DHIS2 programs from a CSV mapping.

    Reads a CSV (first row: program UIDs plus an 'orgunit' column), validates
    the listed programs against the server, then for each program fetches the
    current definition, writes a local backup, applies the orgunit list and
    POSTs the updated program back via the metadata endpoint.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)
    data = list(load_csv(args.source_csv))
    validate_csv(data)
    # Header row = program UID columns; the 'orgunit' column is the row key.
    programs_csv = [h.strip() for h in data[0] if h != 'orgunit']
    if not programs_csv:
        raise ValueError('No programs found')
    params_get = {'fields': 'id', 'paging': False}
    programs_server = [
        p['id'] for p in api.get('programs', params=params_get).json()['programs']
    ]
    # Log (but do not abort on) CSV programs unknown to the server.
    for p in programs_csv:
        if p not in programs_server:
            logger.error(
                u"Program {0} is not a valid program: {1}/programs/{0}.json".format(
                    p, api.api_url))
    program_orgunit_map = get_program_orgunit_map(data)
    metadata_payload = []  # NOTE(review): appended to but never read afterwards — confirm it is dead
    final = {}
    for program_uid, orgunit_list in iteritems(program_orgunit_map):
        # ':owner' fetches the full owned representation so it can be re-posted.
        params_get = {'fields': ':owner'}
        program = api.get('programs/{}'.format(program_uid), params=params_get).json()
        updated = set_program_orgunits(program, orgunit_list, args.append_orgunits)
        metadata_payload.append(updated)
        # Local backup of the fetched program before posting the update.
        # NOTE(review): if set_program_orgunits mutates `program` in place, this
        # backup already contains the updated orgunits — confirm against helper.
        with open('backup_{}.json'.format(program_uid), 'w') as f:
            json.dump(program, f, indent=4)
        print(u"[{}] - Assigning \033[1m{} (total: {})\033[0m "
              u"OrgUnits to Program \033[1m{}\033[0m...".format(
                  args.server, len(orgunit_list),
                  len(program['organisationUnits']), program['name']))
        # One metadata POST per program; `final` is rebuilt each iteration.
        final['programs'] = [updated]
        params_post = {"mergeMode": "REPLACE", "strategy": "UPDATE"}
        api.post(endpoint='metadata', params=params_post, data=final)
def make_category_options_public(self, credentials):
    """Grant public read/write sharing to every non-default category option.

    Pages through all categoryOptions on the server identified by
    *credentials* and posts a sharing payload for each; individual failures
    are reported on stdout and do not stop the run.
    """
    api = Api(credentials.url, credentials.login, credentials.password)
    pages = api.get_paged("categoryOptions", params={"fields": ":all"}, page_size=100)
    for batch in pages:
        for option in batch["categoryOptions"]:
            # Leave the built-in "default" category option untouched.
            if option["name"] == "default":
                continue
            try:
                payload = {
                    "meta": {"allowPublicAccess": True, "allowExternalAccess": False},
                    "object": {
                        "id": option["id"],
                        "name": option["name"],
                        "displayName": option["displayName"],
                        "publicAccess": "rwrw----",
                        "user": option["user"],
                        "externalAccess": False,
                    },
                }
                api.post("sharing?type=categoryOption&id=" + option["id"], payload)
            except Exception as e:
                # Best effort: report and continue with the next option.
                print("Failed to fix ", option["name"], e)
def sync_data_to_dhis2():
    """Push pending DataValueSet rows to their DHIS2 instances.

    Builds a dataValueSets payload per queued DataValueSet, POSTs it to the
    instance configured on the owning user, and deletes the local rows that
    were accepted (HTTP 200). Failed posts are logged and retried on the
    next run since the row is kept.
    """
    data_value_sets_to_delete = []
    # NOTE(review): `now + 1h` selects rows updated up to one hour in the
    # *future*, i.e. effectively everything — looks like it was meant to be
    # a past cutoff (now - 1h). Confirm intended sync window.
    data_value_sets = DataValueSet.objects.filter(
        updated_at__lte=datetime.now(tz=timezone.utc) + timedelta(hours=1))
    for dvs in data_value_sets:
        payload = {}
        # Each set may belong to a different DHIS2 instance, so the client
        # is rebuilt per row from the owning user's instance credentials.
        api = Api(dvs.user.instance.url, dvs.user.instance.username,
                  dvs.user.instance.password)
        payload['dataSet'] = dvs.data_set.dataset_id
        if dvs.mark_as_complete:
            # NOTE(review): json.dumps on a datetime yields a *quoted* JSON
            # string as the value — verify DHIS2 accepts this completeDate
            # format rather than a plain ISO date string.
            payload['completeDate'] = json.dumps(dvs.created_at, sort_keys=True,
                                                 indent=1, cls=DjangoJSONEncoder)
        payload['period'] = dvs.period
        payload['orgUnit'] = dvs.org_unit.org_unit_id
        payload['dataValues'] = []
        for dv in dvs.datavalue_set.all():
            p = {
                "dataElement": dv.data_element.data_element_id,
                "categoryOptionCombo": dv.category_option_combo.category_option_combo_id,
                "value": dv.value,
                "comment": ""
            }
            payload['dataValues'].append(p)
        try:
            response = api.post('dataValueSets', json=payload, params={
                "dataSet": dvs.data_set.dataset_id,
                "orgUnit": dvs.org_unit.org_unit_id,
                "period": dvs.period
            })
            # Only delete locally once the server acknowledged the upload.
            if response.status_code == 200:
                data_value_sets_to_delete.append(dvs.pk)
        except RequestException as ex:
            logger.error(ex)
    logger.info("Syncing data complete")
    # Deletion is deferred (by pk) so the queryset above is not mutated
    # while iterating; re-fetch guards against rows deleted concurrently.
    for dvs_id in data_value_sets_to_delete:
        dvs = DataValueSet.objects.get_or_none(pk=dvs_id)
        if dvs is not None:
            dvs.delete()
    if len(data_value_sets_to_delete) > 0:
        logger.info("Removing data value sets complete")
def main():
    """Rebuild the numerator of each HNQIS 'count' indicator.

    For every health area, collects the matching 'count' programIndicators,
    backs up the existing indicator, rewrites its numerator to sum those
    programIndicators, and POSTs all updated indicators back to the server.
    Exits early when not pointed at a *.psi-mis.org / *.hnqis.org instance.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)
    if '.psi-mis.org' not in args.server and '.hnqis.org' not in args.server:
        # logging.warn is a deprecated alias — use warning().
        logger.warning("This script is intended only for *.psi-mis.org or *.hnqis.org")
        sys.exit(0)

    indicators = {}
    backup_indicators = []
    container = []
    for ha in HEALTH_AREAS:
        # VMMC splits do not have their own HA, so its programIndicators are
        # matched by a name suffix filter instead of the '<HA> count' pattern.
        if ha == 'VMMC':
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {}'.format(ha),
                    'name:like$:count',
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        else:
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {} count'.format(ha),
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        data1 = api.get('programIndicators', params=p1).json()
        pi_uids = [p['id'] for p in data1['programIndicators']]

        # Fetch the full (owned) indicator definition both as a backup and as
        # the object whose numerator gets rewritten below.
        p2 = {
            'paging': False,
            'filter': ['name:eq:HNQIS - {} count'.format(ha)],
            'fields': ':owner'
        }
        data2 = api.get('indicators', params=p2).json()
        backup_indicators.append(data2['indicators'])

        # Both branches of the original VMMC special-case were identical, so
        # the duplicate if/else was collapsed into one query.
        p3 = {
            'paging': False,
            'filter': [
                'shortName:like: HNQIS {}'.format(ha),  # 2.30 would need to change filters
                'name:!like:v1'
            ],
            'fields': 'id,name'
        }
        data3 = api.get('programs', params=p3).json()
        no_of_programs = len(data3['programs'])

        # Sanity check: one 'count' programIndicator is expected per program.
        if no_of_programs != len(pi_uids):
            # (fixed doubled ANSI bold escape in the original message)
            print(u"\033[1mWarning\033[0m - number of {} programs ({}) "
                  u"does not match number of 'count' programIndicators ({})!".format(
                      ha, no_of_programs, len(pi_uids)))
            print("\n".join([x['name'] for x in data3['programs']]))

        if len(data2['indicators']) == 1:
            i = data2['indicators'][0]
            i['numerator'] = create_numerator(pi_uids)
            container.append(i)
            print(u' \033[1m{}\033[0m - Added {} programIndicators to numerator '
                  u'of indicator "{}"'.format(ha, len(pi_uids), i['name']))
        elif len(data2['indicators']) > 1:
            print(u"\033[1mMore than one indicator found for health area {}\033[0m".format(ha))
        elif len(pi_uids) != 0:
            print(u"\033[1mNo indicator found for health area {}\033[0m".format(ha))

    dump_to_file(backup_indicators)
    indicators['indicators'] = container
    print(u"Posting updated programindicators to \033[1m{}\033[0m...".format(args.server))
    time.sleep(3)  # grace period to let the operator abort with Ctrl-C
    api.post('metadata', params={'importMode': 'COMMIT', 'preheatCache': False},
             data=indicators)
"shortName": "Dummy data placeholder", "aggregationType": "NONE", "domainType": "AGGREGATE", "publicAccess": "--------", "externalAccess": False, "valueType": "NUMBER", "zeroIsSignificant": False, "favorite": False, "optionSetValue": False, } # First, create the data element try: response = api.post('metadata', params={ 'mergeMode': 'REPLACE', 'importStrategy': 'CREATE_AND_UPDATE' }, json={'dataElements': [dummy_data_de]}) except RequestException as e: # Print errors returned from DHIS2 logger.error("POST failed with error " + str(e)) exit() else: print('Data element ' + de_uid + ' created') # Get OU level 1 try: ou = api.get('organisationUnits', params={ 'fields': 'id,name', 'filter': 'level:eq:1'