def test_setup_logger_default():
    """Smoke-test the default logger setup: each log level must be callable."""
    from dhis2 import logger, setup_logger

    setup_logger()
    # Emit one message per level, in the same order as before.
    for emit, message in ((logger.info, "info"),
                          (logger.warning, "warn"),
                          (logger.debug, "debug"),
                          (logger.error, "error")):
        emit(message)
def main():
    """Verify every UID referenced in validation-rule expressions.

    Fetches all validation rules, extracts the UIDs used in their left/right
    side expressions, and resolves each one against the `identifiableObjects`
    endpoint. UIDs that cannot be resolved (HTTP 404) are logged as warnings;
    other API errors are logged as errors.
    """
    setup_logger()
    args = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=args.password)
    p = {
        'fields': 'id,name,description,leftSide[expression],rightSide[expression]',
        'paging': False
    }
    data = api.get('validationRules', params=p).json()
    rules = data['validationRules']
    rule_count = len(rules)  # hoisted: invariant across the loop
    uid_cache = set()  # UIDs already confirmed to exist on the server
    for i, rule in enumerate(rules, 1):
        info_msg = "{}/{} Analyzing Validation Rule '{}' ({})"
        logger.info(
            info_msg.format(i, rule_count, rule['name'], rule['id']))
        for uid in extract_uids(rule):
            if uid in uid_cache:
                continue
            try:
                api.get('identifiableObjects/{}'.format(uid)).json()
            except APIException as exc:
                if exc.code == 404:
                    # fixed: logger.warn is a deprecated alias of logger.warning
                    logger.warning(
                        "UID in expression not identified: {}".format(uid))
                else:
                    logger.error(exc)
            else:
                uid_cache.add(uid)
def main():
    """Validate a CSS file and upload it as the server's custom stylesheet."""
    args, password = parse_args()
    setup_logger(include_caller=False)
    api = create_api(server=args.server,
                     username=args.username,
                     password=password)

    css_path = args.css
    validate_file(css_path)
    post_file(api, filename=css_path)

    message = "{} CSS posted to {}. Clear your Browser cache / use Incognito."
    logger.info(message.format(css_path, api.api_url))
def test_setup_logger_to_file():
    """Smoke-test logging to a file: all levels callable, logfile created."""
    from dhis2 import logger, setup_logger

    filename = os.path.join(tempfile.gettempdir(), 'logfile.log')
    setup_logger(logfile=filename)
    # Emit one message per level, in the same order as before.
    for emit, message in ((logger.info, "info"),
                          (logger.warning, "warn"),
                          (logger.debug, "debug"),
                          (logger.error, "error")):
        emit(message)
    assert os.path.isfile(filename)
def main():
    """Run every metadata integrity check against the target DHIS2 server."""
    setup_logger(include_caller=False)
    args, password = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=password)

    # All checkers, in the original fixed order.
    checks = (
        check_validation_rules,
        check_option_sets,
        check_category_options,
        check_categories,
        check_category_combos,
    )
    for check in checks:
        check(api)
def main():
    """Assign organisation units to programs from a CSV mapping.

    Reads a CSV whose header row lists program UIDs (plus an 'orgunit'
    column), validates the programs against the server, then for each program
    updates its orgunit assignment, writes a JSON backup of the pre-update
    state, and posts the change via the metadata endpoint.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)
    data = list(load_csv(args.source_csv))
    validate_csv(data)
    # Header row minus the 'orgunit' column yields the program UID list.
    programs_csv = [h.strip() for h in data[0] if h != 'orgunit']
    if not programs_csv:
        raise ValueError('No programs found')
    params_get = {'fields': 'id', 'paging': False}
    programs_server = [
        p['id']
        for p in api.get('programs', params=params_get).json()['programs']
    ]
    # Warn (but do not abort) for CSV programs unknown to the server.
    for p in programs_csv:
        if p not in programs_server:
            logger.error(
                u"Program {0} is not a valid program: {1}/programs/{0}.json".
                format(p, api.api_url))
    program_orgunit_map = get_program_orgunit_map(data)
    metadata_payload = []
    final = {}
    for program_uid, orgunit_list in iteritems(program_orgunit_map):
        params_get = {'fields': ':owner'}
        program = api.get('programs/{}'.format(program_uid),
                          params=params_get).json()
        updated = set_program_orgunits(program, orgunit_list,
                                       args.append_orgunits)
        metadata_payload.append(updated)
        # Back up the server-side state before overwriting it.
        with open('backup_{}.json'.format(program_uid), 'w') as f:
            json.dump(program, f, indent=4)
        print(u"[{}] - Assigning \033[1m{} (total: {})\033[0m "
              u"OrgUnits to Program \033[1m{}\033[0m...".format(
                  args.server, len(orgunit_list),
                  len(program['organisationUnits']), program['name']))
        # NOTE(review): only the current program is posted each iteration;
        # metadata_payload accumulates but is never posted as a whole —
        # presumably intentional per-program posting, confirm with callers.
        final['programs'] = [updated]
        params_post = {"mergeMode": "REPLACE", "strategy": "UPDATE"}
        api.post(endpoint='metadata', params=params_post, data=final)
def main():
    """Export user details (credentials, groups, roles, orgunits) to CSV."""
    setup_logger()
    args, password = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=password)

    user_params = {
        'fields': 'name,'
                  'userCredentials[username,lastLogin,userRoles[name],userInfo[phoneNumber,firstName,surname]],'
                  'organisationUnits[path],userGroups[name],'
                  'dataViewOrganisationUnits[path]',
        'paging': False
    }
    users = api.get(endpoint='users', params=user_params).json()

    ou_params = {'fields': 'id,name', 'paging': False}
    ou_response = api.get(endpoint='organisationUnits', params=ou_params).json()
    # Map orgunit UID -> name for path resolution.
    ou_map = {ou['id']: ou['name'] for ou in ou_response['organisationUnits']}

    file_name = "userinfo-{}.csv".format(file_timestamp(api.api_url))
    header_row = [
        'name', 'firstName', 'surname', 'username', 'phoneNumber', 'lastLogin',
        'userGroups', 'userRoles', 'orgunitPaths', 'dataViewOrgunitPaths'
    ]
    data = [
        [
            user.name, user.first_name, user.surname, user.username,
            user.phone_number, user.last_login, user.user_groups,
            user.user_roles, user.org_units, user.dv_org_units
        ]
        for user in format_user(users, ou_map)
    ]
    write_csv(data, file_name, header_row)
    logger.info("Success! CSV file exported to {}".format(file_name))
def main():
    """Scan HNQIS program events for inconsistent _OverallScore values.

    Collects events of all HNQIS programs, reports mismatches detected by
    `analyze_event`, and — when --fix_values is set — pushes corrected events
    back to the server.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)
    p = {
        'paging': False,
        'filter': 'name:like:HNQIS',
        'fields': 'id,name'
    }
    # fixed: `.json()` was missing — Api.get returns a response object,
    # while the subscripting below requires the decoded payload (every
    # other call in this script decodes with .json()).
    programs = api.get('programs', params=p).json()
    print("event_date,program,name,event,_OverallScore,0CS-100,diff")
    fix_them = []
    csparams = {
        'filter': ['shortName:like:.0CS-100', 'name:!ilike:_DEL'],
        'paging': False,
        'fields': 'id'
    }
    root_compscores = [
        x['id']
        for x in api.get('dataElements', params=csparams).json()['dataElements']
    ]
    # renamed loop variable from `p` to avoid shadowing the params dict above
    for program in programs['programs']:
        params = {
            'program': program['id'],
            'skipPaging': True,
            'fields': '[*]'
        }
        events = api.get('events', params=params).json()
        for event in events['events']:
            if analyze_event(program, event, root_compscores):
                fix_them.append(event)
    if fix_them and args.fix_values:
        logger.info(u"Fixing those events and resetting _Order Forward...")
        total = len(fix_them)
        for i, e in enumerate(fix_them, 1):
            fixed = fix_event(e, root_compscores)
            logger.info(u"[{}/{}] Pushing event {}...".format(i, total, e['event']))
            api.put('events/{}'.format(e['event']), data=fixed)
    else:
        # fixed: logger.warn is a deprecated alias of logger.warning
        logger.warning(u"Not fixing events")
def main():
    """Export indicators or programIndicators (per -t argument) to CSV."""
    setup_logger(include_caller=False)
    args, password = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=password,
                     api_version=args.api_version)

    indicator_type = args.indicator_type
    file_name = '{}-{}.csv'.format(indicator_type, file_timestamp(api.api_url))

    # Select which metadata fields to fetch, depending on the object type.
    if indicator_type == 'indicators':
        selected = [x for x in indicator_fields.values() if x != 'type']
    elif indicator_type == 'programIndicators':
        selected = [
            x for x in program_indicator_fields.values()
            if x not in ('type', 'program_name')
        ]
    else:
        raise SystemExit('Cannot process argument -t {}'.format(indicator_type))
    fields = ','.join(selected)

    indicators = api.get(endpoint=indicator_type,
                         params=get_params(args.indicator_filter, fields)).json()
    message = analyze_result(indicator_type, indicators, args.indicator_filter)
    logger.info(message)

    logger.info("Analyzing metadata...")
    object_mapping = object_map(api)
    write_to_csv(api, indicator_type, indicators, object_mapping, file_name)
def main():
    """Bulk-update an attribute's values on metadata objects from a CSV.

    Each CSV row provides an object UID and the attribute value to set.
    A short countdown gives the operator a chance to abort before writes.
    """
    setup_logger()
    args = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=args.password)
    Attribute = namedtuple('Attribute', 'uid name')
    # fixed: instantiate the namedtuple instead of mutating the class object
    # (assigning Attribute.uid/.name overwrote the field descriptors).
    attribute = Attribute(uid=args.attribute_uid,
                          name=get_attribute_name(api, args.attribute_uid))
    typ = args.object_type
    attribute_is_on_model(api, attribute, typ)
    data = list(load_csv(args.source_csv))
    validate_csv(data)
    total = len(data)
    logger.info(u"Updating values for Attribute '{}' ({}) on {} {} ...".format(
        attribute.name, attribute.uid, total, typ))
    # Grace period so the operator can Ctrl-C before anything is written.
    for i in range(3, 0, -1):
        time.sleep(i)
        print('Proceeding in {}...'.format(i))
    for i, obj in enumerate(data, 1):
        obj_uid = obj['uid']
        attribute_value = obj['attributeValue']
        obj_old = api.get('{}/{}'.format(args.object_type, obj_uid),
                          params={
                              'fields': ':owner'
                          }).json()
        obj_updated = create_or_update_attribute_values(
            obj_old, attribute.uid, attribute_value)
        api.put('{}/{}'.format(typ, obj_uid), data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(
            i, total, attribute_value, typ[:-1], obj_uid))
def main():
    """Bulk-update attribute values from a key/value CSV (older variant).

    Validates that the attribute UID is well-formed and assigned to the
    target object type, then updates each object listed in the CSV.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)
    # NOTE(review): errors below only log, they do not abort — presumably
    # intentional best-effort behavior; confirm before tightening.
    if not is_valid_uid(args.attribute_uid):
        logger.error("Attribute {} is not a valid UID".format(args.attribute_uid))
    data = list(load_csv(args.source_csv))
    validate_csv(data)
    # e.g. for object_type 'dataElements' the flag is 'dataElementAttribute'.
    attr_get = {'fields': 'id,name,{}Attribute'.format(args.object_type[:-1])}
    attr = api.get('attributes/{}'.format(args.attribute_uid),
                   params=attr_get).json()
    if attr['{}Attribute'.format(args.object_type[:-1])] is False:
        logger.error("Attribute {} is not assigned to type {}".format(
            args.attribute_uid, args.object_type[:-1]))
    logger.info(
        "[{}] - Updating Attribute Values for Attribute \033[1m{}\033[0m for \033[1m{}\033[0m \033[1m{}\033[0m...".format(
            args.server, args.attribute_uid, len(data), args.object_type))
    # Grace period: Ctrl-C during the sleep logs a notice but continues.
    try:
        time.sleep(3)
    except KeyboardInterrupt:
        # fixed: logger.warn is a deprecated alias of logger.warning;
        # also dropped the redundant `pass` after the log call.
        logger.warning("\033[1m{}\033[0m".format("Aborted!"))
    for i, obj in enumerate(data, 1):
        obj_uid = obj.get('key')
        attribute_value = obj.get('value')
        params_get = {'fields': ':owner'}
        obj_old = api.get('{}/{}'.format(args.object_type, obj_uid),
                          params=params_get).json()
        obj_updated = create_or_update_attributevalues(
            obj=obj_old,
            attribute_uid=args.attribute_uid,
            attribute_value=attribute_value)
        api.put('{}/{}'.format(args.object_type, obj_uid),
                params=None,
                data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(
            i, len(data), attribute_value, args.object_type[:-1], obj_uid))
def main():
    """Apply sharing settings (public access + user groups) to objects."""
    setup_logger(include_caller=False)
    args = parse_args()
    # Re-configure logging when a logfile and/or debug level is requested.
    if args.logging_to_file:
        if args.debug:
            setup_logger(logfile=args.logging_to_file,
                         log_level=DEBUG,
                         include_caller=True)
        else:
            setup_logger(logfile=args.logging_to_file, include_caller=False)
    elif args.debug:
        setup_logger(log_level=DEBUG, include_caller=True)

    api = create_api(server=args.server,
                     username=args.username,
                     password=args.password,
                     api_version=args.api_version)
    validate_args(args, api.version_int)

    public_access = Permission.from_public_args(args.public_access)
    collection = ShareableObjectCollection(api, args.object_type, args.filter)
    usergroups = UserGroupsCollection(api, args.groups)
    validate_data_access(public_access, collection, usergroups, api.version_int)
    logger.info(u"Public access ➜ {}".format(public_access))

    total = len(collection.elements)
    for index, element in enumerate(collection.elements, 1):
        update = ShareableObject(obj_type=element.obj_type,
                                 uid=element.uid,
                                 name=element.name,
                                 code=element.code,
                                 public_access=public_access,
                                 usergroup_accesses=usergroups.accesses)
        pointer = u"{0}/{1} {2} {3}".format(index, total, collection.name,
                                            element.uid)
        if skip(args.overwrite, element, update):
            logger.warning(u'Not overwriting: {0} {1}'.format(
                pointer, element.log_identifier))
        else:
            logger.info(u"{0} {1}".format(pointer, element.log_identifier))
            share(api, update)
from dhis2 import Api, RequestException, setup_logger, logger, load_json, import_response_ok """ Import a metadata JSON file from your computer. """ # Create a Api object api = Api("play.dhis2.org/dev", "admin", "district") # setup the logger setup_logger(include_caller=False) def main(): # load the JSON file that sits next to the script data = load_json("2_import_metadata.json") try: # import metadata r = api.post( "metadata.json", params={"preheatCache": False, "strategy": "CREATE"}, json=data, timeout=(3, 30) ) except RequestException as e: logger.error("Import failed: {}".format(e)) else: if import_response_ok(r.json()): logger.info("Import successful!") else:
def main():
    """Check every item on each DHIS2 dashboard and report broken ones.

    For each configured instance: fetch dashboards (optionally filtered by
    name prefix or a comma-separated UID list), try to GET every dashboard
    item and its analytics data, and collect any failure or empty-data
    condition into a per-instance CSV report.

    Returns the number of hard errors found (broken items / failed data).
    """
    my_parser = argparse.ArgumentParser(description='dashboard_checker')
    my_parser.add_argument('-i', '--instance', action="store", dest="instance", type=str,
                           help='URL of the instance to process')
    my_parser.add_argument(
        '-df', '--dashboard_filter', action="store", dest="dashboard_filter", type=str,
        help='Either a prefix or a list of comma separated UIDs')
    my_parser.add_argument('--no_data_warning', dest='no_data_warning', action='store_true')
    my_parser.add_argument('--omit-no_data_warning', dest='no_data_warning', action='store_false')
    my_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true')
    my_parser.set_defaults(no_data_warning=True)
    my_parser.set_defaults(verbose=False)
    args = my_parser.parse_args()

    # Either a single instance from the CLI, or the hard-coded default list.
    if args.instance is not None:
        instances = [{
            'name': args.instance.split('/')[-1].replace(':', '_'),
            'url': args.instance
        }]
    else:
        instances = [
            #{'name':'newdemos', 'url':'https://who-demos.dhis2.org/newdemos', 'SQL_view_TRK':'xfemQFHUTUV', 'SQL_view_AGG':'lg8lFbDMw2Z'}
            #{'name':'tracker_dev', 'url': 'https://who-dev.dhis2.org/tracker_dev', 'SQL_view_TRK': 'xfemQFHUTUV', 'SQL_view_AGG': 'lg8lFbDMw2Z'}
            {
                'name': 'covid-19',
                'url': 'https://demos.dhis2.org/covid-19',
                'SQL_view_TRK': 'xfemQFHUTUV',
                'SQL_view_AGG': 'lg8lFbDMw2Z'
            }
        ]
    log_file = "./dashboard_checker.log"
    setup_logger(log_file)

    credentials_file = './auth.json'

    # One row per problematic dashboard item is appended to this frame.
    df = pd.DataFrame({},
                      columns=[
                          'dashboard_name', 'type', 'uid', 'name', 'issue',
                          'api_link', 'app_link'
                      ])

    errors_found = 0

    for instance in instances:
        # Credentials are read from auth.json next to the script.
        try:
            f = open(credentials_file)
        except IOError:
            print(
                "Please provide file auth.json with credentials for DHIS2 server"
            )
            exit(1)
        else:
            with open(credentials_file, 'r') as json_file:
                credentials = json.load(json_file)
            api_source = Api(instance['url'], credentials['dhis']['username'],
                             credentials['dhis']['password'])

        # Get dashboards
        params = {"fields": "*", "paging": "false"}
        if args.dashboard_filter is not None:
            item_list = args.dashboard_filter.split(',')
            # A single non-UID token is treated as a name prefix filter.
            if len(item_list) == 1 and not is_valid_uid(item_list[0]):
                params["filter"] = "name:$like:" + args.dashboard_filter
            # Let's consider it as a list of uids
            else:
                # Validate the list
                for item in item_list:
                    if not is_valid_uid(item):
                        logger.error("UID " + item +
                                     " is not a valid DHIS2 UID")
                        exit(1)
                params["filter"] = "id:in:[" + args.dashboard_filter + "]"
        dashboards = api_source.get('dashboards',
                                    params=params).json()['dashboards']

        # NOTE(review): this dict is reused (not re-created) across items;
        # stale keys are overwritten below before each append.
        dashboard_item_with_issues_row = dict()
        for dashboard in dashboards:
            logger.info('Processing dashboard ' + dashboard['name'])
            dashboard_item_with_issues_row['dashboard_name'] = dashboard[
                'name']
            # 2.33 still uses chart/reportTable; later versions use visualization.
            if '2.33' not in api_source.version:
                dashboard_items = [
                    'visualization', 'eventReport', 'eventChart', 'map'
                ]
            else:
                dashboard_items = [
                    'chart', 'reportTable', 'eventReport', 'eventChart', 'map'
                ]
            for dashboardItem in dashboard['dashboardItems']:
                # The dashboard item could be of type TEXT, for example
                # in this case there is nothing to do
                dashboard_item_type_found = False
                for dashboard_item in dashboard_items:
                    if dashboard_item in dashboardItem:
                        dashboard_item_type_found = True
                        dashboard_item_with_issues_row['issue'] = ""
                        dashboard_item_with_issues_row['type'] = dashboard_item
                        dashboard_item_with_issues_row['uid'] = dashboardItem[
                            dashboard_item]['id']
                        dashboard_item_with_issues_row['name'] = ""
                        if args.verbose:
                            logger.info('Trying ' + dashboard_item + ' ' +
                                        dashboardItem[dashboard_item]['id'])
                        # First: can the item itself be fetched?
                        try:
                            api_endpoint = dashboard_item + 's/' + dashboardItem[
                                dashboard_item]['id']
                            dashboard_item_with_issues_row[
                                'api_link'] = instance[
                                    'url'] + '/api/' + api_endpoint
                            item = api_source.get(api_endpoint,
                                                  params={
                                                      "fields": "*"
                                                  }).json()
                        except RequestException as e:
                            logger.error(dashboard_item + ' ' +
                                         dashboardItem[dashboard_item]['id'] +
                                         " BROKEN with error " + str(e))
                            dashboard_item_with_issues_row['issue'] = str(e)
                            errors_found += 1
                        else:
                            dashboard_item_with_issues_row['name'] = item[
                                'name']
                            # eventReport/eventChart data is not checked here.
                            if dashboard_item in ['eventReport', 'eventChart']:
                                continue
                            # Try to get the data
                            try:
                                if dashboard_item == 'map':
                                    # Maps are checked per mapView layer.
                                    for map_view in item['mapViews']:
                                        params = build_analytics_payload(
                                            map_view, args.verbose)
                                        if params != {}:
                                            if 'layer' in map_view and map_view[
                                                    'layer'] == 'event' and 'program' in map_view:
                                                data = api_source.get(
                                                    'analytics/events/query/' +
                                                    map_view['program']['id'],
                                                    params=params).json()
                                            else:
                                                data = api_source.get(
                                                    'analytics',
                                                    params=params).json()
                                else:
                                    data = api_source.get(
                                        'analytics',
                                        params=build_analytics_payload(
                                            item, args.verbose)).json()
                            except RequestException as e:
                                logger.error(
                                    dashboard_item + ' ' +
                                    dashboardItem[dashboard_item]['id'] +
                                    " data cannot be retrieved with error " +
                                    str(e))
                                dashboard_item_with_issues_row['issue'] = str(
                                    e)
                                errors_found += 1
                            else:
                                # print(data['rows'])
                                # Empty analytics result is flagged (not counted
                                # as a hard error) unless warnings are disabled.
                                if args.no_data_warning and (
                                        'rows' not in data
                                        or len(data['rows']) == 0):
                                    dashboard_item_with_issues_row[
                                        'issue'] = 'NO DATA'
                                    logger.warning(
                                        dashboardItem[dashboard_item]['id'] +
                                        ': NO DATA!!!')
                                    #exit(0)
                # Build a deep link to the item in the relevant web app.
                if dashboard_item_type_found and dashboard_item_with_issues_row[
                        'issue'] != "":
                    if dashboard_item_with_issues_row[
                            'type'] == 'visualization':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                            '/dhis-web-data-visualizer/index.html#/' + \
                            dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row['type'] == 'map':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                            '/dhis-web-maps/index.html'
                    elif dashboard_item_with_issues_row[
                            'type'] == 'eventReport':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                            'dhis-web-event-reports/index.html?id=' + \
                            dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row[
                            'type'] == 'eventChart':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                            '/dhis-web-event-visualizer/index.html?id=' + \
                            dashboard_item_with_issues_row['uid']
                    df = df.append(dashboard_item_with_issues_row,
                                   ignore_index=True)

        # Per-instance CSV report named after the instance.
        export_csv = df.to_csv(instance['name'] + '.csv',
                               index=None,
                               header=True)

    # Release log handlers
    handlers = logger.handlers[:]
    for handler in handlers:
        handler.close()
        logger.removeHandler(handler)

    return errors_found
def main():
    """Rebuild HNQIS 'count' indicator numerators from programIndicators.

    For each health area: collect the matching 'count' programIndicators,
    back up the corresponding indicator, set its numerator to the sum of the
    programIndicators, and finally post all updated indicators as metadata.
    Only intended for *.psi-mis.org / *.hnqis.org servers.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)
    # Guard: refuse to run against any other server.
    if '.psi-mis.org' not in args.server and '.hnqis.org' not in args.server:
        logger.warn("This script is intended only for *.psi-mis.org or *.hnqis.org")
        sys.exit(0)
    indicators = {}
    backup_indicators = []
    container = []
    for ha in HEALTH_AREAS:
        # VMMC splits do not have their own HA
        if ha == 'VMMC':
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {}'.format(ha),
                    'name:like$:count',
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        else:
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {} count'.format(ha),
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        data1 = api.get('programIndicators', params=p1).json()
        pi_uids = [p['id'] for p in data1['programIndicators']]
        # The single 'count' indicator for this health area (full payload for backup).
        p2 = {
            'paging': False,
            'filter': ['name:eq:HNQIS - {} count'.format(ha)],
            'fields': ':owner'
        }
        data2 = api.get('indicators', params=p2).json()
        backup_indicators.append(data2['indicators'])
        # Programs of this health area, used only for a sanity-count below.
        if ha == 'VMMC':
            p3 = {
                'paging': False,
                'filter': [
                    'shortName:like: HNQIS {}'.format(ha),
                    'name:!like:v1'
                ],
                'fields': 'id,name'
            }
        else:
            p3 = {
                'paging': False,
                'filter': [
                    'shortName:like: HNQIS {}'.format(ha),  # 2.30 would need to change filters
                    'name:!like:v1'
                ],
                'fields': 'id,name'
            }
        data3 = api.get('programs', params=p3).json()
        no_of_programs = len(data3['programs'])
        # Sanity check: one 'count' programIndicator expected per program.
        if no_of_programs != len(pi_uids):
            print(u"\033[1mWarning\033[1m\033[0m - number of {} programs ({}) "
                  u"does not match number of 'count' programIndicators ({})!".format(ha, no_of_programs, len(pi_uids)))
            print("\n".join([x['name'] for x in data3['programs']]))
        if len(data2['indicators']) == 1:
            i = data2['indicators'][0]
            i['numerator'] = create_numerator(pi_uids)
            container.append(i)
            print(u' \033[1m{}\033[0m - Added {} programIndicators to numerator of indicator "{}"'.format(ha, len(pi_uids), i['name']))
        elif len(data2['indicators']) > 1:
            print(u"\033[1mMore than one indicator found for health area {}\033[0m".format(ha))
        elif len(pi_uids) != 0:
            print(u"\033[1mNo indicator found for health area {}\033[0m".format(ha))
    # Write pre-update indicators to a backup file before posting changes.
    dump_to_file(backup_indicators)
    indicators['indicators'] = container
    print(u"Posting updated programindicators to \033[1m{}\033[0m...".format(args.server))
    time.sleep(3)
    api.post('metadata', params={'importMode': 'COMMIT', 'preheatCache': False}, data=indicators)
def main():
    """Apply or extend sharing settings on a collection of DHIS2 objects.

    Without --extend: overwrite public access and user group accesses on
    every matched object. With --extend: merge the server's existing user
    group accesses with the ones supplied, and inherit each object's public
    access when none is given on the command line.
    """
    setup_logger(include_caller=False)
    args, password = parse_args()
    # Re-configure logging when a logfile and/or debug level is requested.
    if args.logging_to_file:
        if args.debug:
            setup_logger(logfile=args.logging_to_file,
                         log_level=DEBUG,
                         include_caller=True)
        else:
            setup_logger(logfile=args.logging_to_file, include_caller=False)
    elif args.debug:
        setup_logger(log_level=DEBUG, include_caller=True)
    api = create_api(server=args.server,
                     username=args.username,
                     password=password,
                     api_version=args.api_version)
    validate_args(args, api.version_int)
    public_access_permission = Permission.from_public_args(args.public_access)
    collection = ShareableObjectCollection(api, args.object_type, args.filter)
    usergroups = UserGroupsCollection(api, args.groups)
    validate_data_access(public_access_permission, collection, usergroups,
                         api.version_int)
    # sort by name
    try:
        elements = sorted(collection.elements, key=operator.attrgetter('name'))
    except AttributeError:
        # Elements without a 'name' attribute are left in collection order.
        elements = collection.elements
    # handle log messages and collection-wide public access and usergroup access if applicable
    if args.extend:
        if not args.public_access:
            # No public access given: each object keeps its own (set per-element below).
            logger.warning(u"Public access {} INHERIT".format(ARROW))
        else:
            logger.info(u"Public access {} {}".format(
                ARROW, public_access_permission))
            public_access = Permission.from_public_args(args.public_access)
        logger.warning(u"Extending with additional User Groups...")
    else:
        logger.info(u"Public access {} {}".format(ARROW, public_access_permission))
        public_access = Permission.from_public_args(args.public_access)
        usergroup_accesses = usergroups.accesses
    time.sleep(2)
    for i, element in enumerate(elements, 1):
        if args.extend:
            # merge user group accesses
            usergroup_accesses = merge(server_uga=element.usergroup_accesses,
                                       local_uga=usergroups.accesses)
            # if public access is not provided via argument, re-use public access from object on server
            if not args.public_access:
                public_access = element.public_access
        # no issue for public_access and usergroup_accesses since it's set above with same if/else check
        # to improve performance and allow for logical logging message placement
        # noinspection PyUnboundLocalVariable
        update = ShareableObject(obj_type=element.obj_type,
                                 uid=element.uid,
                                 name=element.name,
                                 code=element.code,
                                 public_access=public_access,
                                 usergroup_accesses=usergroup_accesses)
        pointer = u"{0}/{1} {2} {3}".format(i, len(collection.elements),
                                            collection.name, element.uid)
        if not skip(args.overwrite, element, update):
            logger.info(u"{0} {1}".format(pointer, element.log_identifier))
            share(api, update)
        else:
            logger.warning(u'Skipping (already shared): {0} {1}'.format(
                pointer, element.log_identifier))
from dhis2 import Api, RequestException, setup_logger, logger """ Add "(updated)" to all Data Elements that contain "ANC" in its name. Uses the method PUT. Print errors if it failed. """ # Create a Api object api = Api('play.dhis2.org/dev', 'admin', 'district') # setup the logger setup_logger() def main(): # Print DHIS2 Info logger.warn("You are running on DHIS2 version {} revision {} - " "Last Analytics generation was at: {}".format( api.version, api.revision, api.info.get('lastAnalyticsTableSuccess'))) # GET dataElements that contain ANC in its name params = {'filter': 'name:like:ANC', 'paging': False, 'fields': ':owner'} data_elements = api.get('dataElements', params=params).json() # Loop through each dataElement for de in data_elements['dataElements']: # Add (updated) to the name de['name'] = '{} (updated)'.format(de['name']) try: