def main():
    """Rename every ANC data element on the server, logging each result."""
    # Show which DHIS2 instance we are talking to before changing anything
    logger.warn("You are running on DHIS2 version {} revision {} - "
                "Last Analytics generation was at: {}".format(
                    api.version, api.revision,
                    api.info.get('lastAnalyticsTableSuccess')))

    # Fetch every data element whose name contains "ANC", full owner fields
    query = {'filter': 'name:like:ANC', 'paging': False, 'fields': ':owner'}
    payload = api.get('dataElements', params=query).json()

    for element in payload['dataElements']:
        # Tag the name so the change is visible in the UI
        element['name'] = '{} (updated)'.format(element['name'])
        try:
            # Replace the data element on the server
            api.put('dataElements/{}'.format(element['id']),
                    params={'mergeMode': 'REPLACE'}, json=element)
        except RequestException as e:
            # Surface errors returned from DHIS2
            logger.error("Updating DE '{}' ({}) failed: {}".format(
                element['name'], element['id'], e))
        else:
            logger.info("Updated DE '{}' ({}) successful".format(
                element['name'], element['id']))
def test_setup_logger_default():
    """Smoke-test the default logger setup: every level must be callable."""
    from dhis2 import logger, setup_logger

    setup_logger()
    # exercise each log level once, in the same order as before
    for emit, text in ((logger.info, "info"), (logger.warn, "warn"),
                       (logger.debug, "debug"), (logger.error, "error")):
        emit(text)
def main():
    """Check every validation rule's expressions for unresolvable UIDs.

    Fetches all validation rules, extracts candidate UIDs from both
    expression sides and looks each one up once via identifiableObjects,
    warning when a UID cannot be resolved (HTTP 404).
    """
    setup_logger()
    args = parse_args()
    api = create_api(server=args.server, username=args.username,
                     password=args.password)

    p = {
        'fields': 'id,name,description,leftSide[expression],rightSide[expression]',
        'paging': False
    }
    data = api.get('validationRules', params=p).json()
    rules = data['validationRules']
    rule_count = len(rules)

    # Every UID already looked up - resolved or not - so each one is
    # fetched from the server at most once across all rules.
    uid_cache = set()
    for i, rule in enumerate(rules, 1):
        info_msg = "{}/{} Analyzing Validation Rule '{}' ({})"
        logger.info(info_msg.format(i, rule_count, rule['name'], rule['id']))

        for uid in extract_uids(rule):
            if uid in uid_cache:
                continue
            try:
                api.get('identifiableObjects/{}'.format(uid)).json()
            except APIException as exc:
                if exc.code == 404:
                    logger.warn(
                        "UID in expression not identified: {}".format(uid))
                    # BUG FIX: cache misses too - re-fetching a known-missing
                    # UID just repeats the same 404 call and warning.
                    uid_cache.add(uid)
                else:
                    logger.error(exc)
            else:
                uid_cache.add(uid)
def check_validation_rules(api):
    """Verify that every UID used in validation rule expressions resolves.

    Warns per rule for each UID that the server cannot identify (404).
    """
    p = {
        'fields': 'id,name,description,leftSide[expression],rightSide[expression]',
        'paging': False
    }
    data = api.get('validationRules', params=p).json()
    logger.info("*** CHECKING {} VALIDATION RULES... ***".format(
        len(data['validationRules'])))

    for rule in data['validationRules']:
        # NOTE(review): the cache is reset per rule, so a broken UID shared
        # by several rules is reported once for each of them.
        seen = set()
        for uid in extract_uids(rule):
            if uid in seen:
                continue
            try:
                api.get('identifiableObjects/{}'.format(uid)).json()
            except RequestException as exc:
                if exc.code == 404:
                    logger.warn(
                        "Validation Rule '{}' ({}) - "
                        "UID in expression not identified: {}".format(
                            rule['name'], rule['id'], uid))
                    seen.add(uid)
                else:
                    logger.error(exc)
            else:
                seen.add(uid)
def main():
    """Append ' (updated)' to every ANC data element and push the change."""
    # Surface instance details before touching any metadata
    logger.warn(
        "You are running on DHIS2 version {} revision {} - "
        "Last Analytics generation was at: {}".format(
            api.version, api.revision, api.info.get("lastAnalyticsTableSuccess")
        )
    )

    # All data elements with "ANC" in the name, owner fields included
    search = {"filter": "name:like:ANC", "paging": False, "fields": ":owner"}
    for de in api.get("dataElements", params=search).json()["dataElements"]:
        de["name"] = "{} (updated)".format(de["name"])
        try:
            # Full replace of the object on the server
            api.put(
                "dataElements/{}".format(de["id"]),
                params={"mergeMode": "REPLACE"},
                json=de,
            )
        except RequestException as err:
            logger.error(
                "Updating DE '{}' ({}) failed: {}".format(de["name"], de["id"], err)
            )
        else:
            logger.info("Updated DE '{}' ({}) successful".format(de["name"], de["id"]))
def extract_uids(rule):
    """Collect candidate DHIS2 UIDs from both sides of a validation rule.

    Args:
        rule: dict with 'leftSide'/'rightSide', each holding an 'expression'.

    Returns:
        List of UID strings in order of appearance (duplicates kept);
        empty list - after logging a warning - if none are found.
    """
    expressions = rule['leftSide']['expression'] + rule['rightSide'][
        'expression']
    # A DHIS2 UID is exactly 11 chars: one letter + 10 alphanumerics.
    # BUG FIX: \b anchors stop an 11-char slice of a longer alphanumeric
    # token from being reported as a (bogus) UID.
    list_of_uids = re.findall(r'\b[A-Za-z][A-Za-z0-9]{10}\b', expressions)
    if not list_of_uids:
        logger.warn('Expression without UIDs. Check rule {}'.format(
            json.dumps(rule)))
    return list_of_uids
def check_category_combos(api):
    """Warn about category combos assigned to no data element, data set
    element, program or data set.

    Args:
        api: connected DHIS2 API client (provides .get()).
    """

    def _objects_matching(endpoint, filter_expr):
        # One-line helper replacing four copy-pasted GET blocks: returns
        # the objects of `endpoint` matching `filter_expr` (id,name only).
        return api.get(endpoint, params={
            'fields': 'id,name',
            'filter': filter_expr
        }).json()[endpoint]

    cat_combo = api.get('categoryCombos', params={
        'fields': 'id,name',
        'paging': False
    }).json()
    logger.info("*** CHECKING {} CATEGORY COMBOS... ***".format(
        len(cat_combo['categoryCombos'])))

    for cc in cat_combo['categoryCombos']:
        # NOTE(review): these lookups use the API's default paging, as the
        # original did - only non-emptiness matters for the check below.
        usages = [
            _objects_matching('dataElements',
                              'categoryCombo.id:eq:{}'.format(cc['id'])),
            _objects_matching('dataElements',
                              'dataSetElements.categoryCombo.id:eq:{}'.format(
                                  cc['id'])),
            _objects_matching('programs',
                              'categoryCombo.id:eq:{}'.format(cc['id'])),
            _objects_matching('dataSets',
                              'categoryCombo.id:eq:{}'.format(cc['id'])),
        ]
        if not any(usages):
            logger.warn(
                "Category Combo '{}' ({}) is not assigned "
                "to any Data Element, Data Set Element, Program or Data Set".
                format(cc['name'], cc['id']))
def test_setup_logger_to_file():
    """Logging to a file must create the file and accept every level."""
    from dhis2 import logger, setup_logger

    logpath = os.path.join(tempfile.gettempdir(), 'logfile.log')
    setup_logger(logfile=logpath)
    for emit, text in ((logger.info, "info"), (logger.warn, "warn"),
                       (logger.debug, "debug"), (logger.error, "error")):
        emit(text)
    assert os.path.isfile(logpath)
def check_option_sets(api):
    """Warn about option sets that have no options or are assigned nowhere.

    Args:
        api: connected DHIS2 API client (provides .get()).
    """

    def _assigned_to(endpoint, option_set_uid):
        # Helper replacing three copy-pasted GET blocks: objects of
        # `endpoint` referencing the option set (id,name only).
        return api.get(endpoint, params={
            'fields': 'id,name',
            'filter': 'optionSet.id:eq:{}'.format(option_set_uid)
        }).json()[endpoint]

    option_sets = api.get('optionSets', params={
        'fields': 'id,name,options',
        'paging': False
    }).json()
    logger.info("*** CHECKING {} OPTION SETS... ***".format(
        len(option_sets['optionSets'])))

    # Plain loop instead of a list comprehension built only for its
    # logging side effects.
    for o in option_sets['optionSets']:
        if not o.get('options'):
            logger.warn("Option Set '{}' ({}) has no options".format(
                o['name'], o['id']))

    for option_set in option_sets['optionSets']:
        in_use = [
            _assigned_to('dataElements', option_set['id']),
            _assigned_to('trackedEntityAttributes', option_set['id']),
            _assigned_to('attributes', option_set['id']),
        ]
        if not any(in_use):
            logger.warn(
                "Option Set '{}' ({}) is not assigned "
                "to any Data Element, Tracked Entity Attribute or Attribute".
                format(option_set['name'], option_set['id']))
def check_opening_date(org_unit):
    """Log whether an org unit was opened before or after 1990-01-01.

    NOTE(review): assumes openingDate carries a 13-character time suffix
    (e.g. 'T00:00:00.000') after the date - confirm against the payload.
    """
    name = org_unit["name"]
    uid = org_unit["id"]
    date_str = org_unit["openingDate"][:-13]  # keep only YYYY-MM-DD
    opened = datetime.strptime(date_str, "%Y-%m-%d")

    msg = "Organisation Unit '{}' ({}) was opened {} 1990-01-01 on {}"
    cutoff = datetime(year=1990, month=1, day=1)
    # warn only for units opened after the cutoff; otherwise debug-level
    if opened > cutoff:
        logger.warn(msg.format(name, uid, "AFTER", date_str))
    else:
        logger.debug(msg.format(name, uid, "BEFORE", date_str))
def main():
    """Scan HNQIS programs for events whose _OverallScore disagrees with the
    root composite score (0CS-100) and optionally push fixed events back.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)

    p = {
        'paging': False,
        'filter': 'name:like:HNQIS',
        'fields': 'id,name'
    }
    # BUG FIX: the response must be decoded with .json() before indexing -
    # every other request in this file does so.
    programs = api.get('programs', params=p).json()

    # CSV header for the per-event report printed by analyze_event
    print("event_date,program,name,event,_OverallScore,0CS-100,diff")
    fix_them = []

    # Root composite-score data elements (shortName '.0CS-100', not deleted)
    csparams = {
        'filter': ['shortName:like:.0CS-100', 'name:!ilike:_DEL'],
        'paging': False,
        'fields': 'id'
    }
    root_compscores = [x['id'] for x in
                       api.get('dataElements', params=csparams).json()['dataElements']]

    # renamed loop variable: `p` would shadow the query dict above
    for program in programs['programs']:
        params = {
            'program': program['id'],
            'skipPaging': True,
            'fields': '[*]'
        }
        events = api.get('events', params=params).json()
        for event in events['events']:
            if analyze_event(program, event, root_compscores):
                fix_them.append(event)

    if fix_them and args.fix_values:
        logger.info(u"Fixing those events and resetting _Order Forward...")
        for i, e in enumerate(fix_them, 1):
            fixed = fix_event(e, root_compscores)
            logger.info(u"[{}/{}] Pushing event {}...".format(i, len(fix_them), e['event']))
            api.put('events/{}'.format(e['event']), data=fixed)
    else:
        logger.warn(u"Not fixing events")
def main():
    """Bulk-set an attribute value on many objects from a CSV of key/value rows.

    The CSV must provide 'key' (object UID) and 'value' columns; each object
    is fetched, its attributeValues updated, and PUT back to the server.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)

    if not is_valid_uid(args.attribute_uid):
        logger.error("Attribute {} is not a valid UID".format(args.attribute_uid))
        return  # BUG FIX: previously logged the error but kept running

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    # Confirm the attribute is applicable to the target object type,
    # e.g. 'dataElementAttribute' for object_type 'dataElements'.
    attr_get = {'fields': 'id,name,{}Attribute'.format(args.object_type[:-1])}
    attr = api.get('attributes/{}'.format(args.attribute_uid), params=attr_get).json()
    if attr['{}Attribute'.format(args.object_type[:-1])] is False:
        logger.error("Attribute {} is not assigned to type {}".format(
            args.attribute_uid, args.object_type[:-1]))
        return  # BUG FIX: do not update objects with a mismatched attribute

    logger.info(
        "[{}] - Updating Attribute Values for Attribute \033[1m{}\033[0m for \033[1m{}\033[0m \033[1m{}\033[0m...".format(
            args.server, args.attribute_uid, len(data), args.object_type))
    # Grace period so the operator can cancel before any writes happen
    try:
        time.sleep(3)
    except KeyboardInterrupt:
        logger.warn("\033[1m{}\033[0m".format("Aborted!"))
        return  # BUG FIX: 'Aborted!' previously did not actually abort

    for i, obj in enumerate(data, 1):
        obj_uid = obj.get('key')
        attribute_value = obj.get('value')
        params_get = {'fields': ':owner'}
        obj_old = api.get('{}/{}'.format(args.object_type, obj_uid),
                          params=params_get).json()
        obj_updated = create_or_update_attributevalues(
            obj=obj_old,
            attribute_uid=args.attribute_uid,
            attribute_value=attribute_value)
        api.put('{}/{}'.format(args.object_type, obj_uid),
                params=None, data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(
            i, len(data), attribute_value, args.object_type[:-1], obj_uid))
def get_objects(self):
    """Fetch all objects of this type (with sharing fields), honoring filters.

    Returns the raw response on success; exits the process with status 0
    when the server returns nothing for the given filter.
    """
    params = {
        'fields': 'id,name,code,publicAccess,userGroupAccesses',
        'paging': False
    }
    split = None
    if self.filters:
        split = self.filters.split(self.delimiter)
        params['filter'] = split
    if self.root_junction == 'OR':
        params['rootJunction'] = self.root_junction

    response = self.api.get(self.plural, params=params)
    # guard clause: nothing matched -> report and stop the whole run
    if not response:
        logger.warning(u'No {} found - check your filter'.format(self.plural))
        sys.exit(0)

    amount = len(response.json()[self.plural])
    if amount > 0:
        name = self.name if amount == 1 else self.plural
        if self.filters:
            print_msg = u"Sharing {} {} with filter [{}]"
            logger.info(
                print_msg.format(
                    amount, name,
                    " {} ".format(self.root_junction).join(split)))
        else:
            # no filter means everything gets shared - give the operator
            # a 10-second window to abort
            print_msg = u"Sharing *ALL* {} {} (no filters set!). Continuing in 10 seconds..."
            logger.warn(print_msg.format(amount, name))
            time.sleep(10)
    return response
def check_categories(api):
    """Warn about categories that belong to no category combo.

    Args:
        api: connected DHIS2 API client (provides .get()).
    """
    categories = api.get('categories', params={
        'fields': 'id,name,categoryCombos',
        'paging': False
    }).json()
    logger.info("*** CHECKING {} CATEGORIES... ***".format(
        len(categories['categories'])))
    # Plain loop instead of a list comprehension built only for its
    # logging side effects.
    for c in categories['categories']:
        if not c.get('categoryCombos'):
            logger.warn("Category '{}' ({}) is not in any Category Combo".format(
                c['name'], c['id']))
def check_category_options(api):
    """Warn about category options that belong to no category.

    Args:
        api: connected DHIS2 API client (provides .get()).
    """
    category_options = api.get('categoryOptions', params={
        'fields': 'id,name,categories',
        'paging': False
    }).json()
    logger.info("*** CHECKING {} CATEGORY OPTIONS... ***".format(
        len(category_options['categoryOptions'])))
    # Plain loop instead of a list comprehension built only for its
    # logging side effects.
    for co in category_options['categoryOptions']:
        if not co.get('categories'):
            logger.warn("Category Option '{}' ({}) is not in any Category".format(
                co['name'], co['id']))
def validate_file(filename):
    """Raise PKClientException unless `filename` exists and is non-empty."""
    if not os.path.exists(filename):
        raise PKClientException("File does not exist: {}".format(filename))
    if not os.path.getsize(filename):
        raise PKClientException("File is empty: {}".format(filename))


def main():
    """Upload a custom CSS file to the connected DHIS2 instance."""
    args, password = parse_args()
    setup_logger(include_caller=False)
    api = create_api(server=args.server, username=args.username,
                     password=password)
    validate_file(args.css)
    post_file(api, filename=args.css)
    logger.info(
        "{} CSS posted to {}. Clear your Browser cache / use Incognito.".
        format(args.css, api.api_url))


if __name__ == "__main__":
    # top-level boundary: log and classify, never dump a raw traceback
    # at the user except for truly unexpected errors
    try:
        main()
    except KeyboardInterrupt:
        logger.warn("Aborted.")
    except PKClientException as e:
        logger.error(e)
    except Exception as e:
        logger.exception(e)
def main():
    """Rebuild the numerator of each per-health-area 'HNQIS - <HA> count'
    indicator from that area's 'count' program indicators.

    Only meant for *.psi-mis.org / *.hnqis.org servers. Existing indicators
    are dumped to a backup file before the updated ones are posted back.
    """
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)

    # guard: refuse to run against any other server
    if '.psi-mis.org' not in args.server and '.hnqis.org' not in args.server:
        logger.warn("This script is intended only for *.psi-mis.org or *.hnqis.org")
        sys.exit(0)

    indicators = {}          # metadata payload posted at the end
    backup_indicators = []   # pre-change snapshot of every indicator
    container = []           # indicators with rebuilt numerators

    for ha in HEALTH_AREAS:
        # VMMC splits do not have their own HA
        if ha == 'VMMC':
            # VMMC: match any 'HNQIS - VMMC*' programIndicator whose name
            # ends with 'count' (like$ = ends-with filter)
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {}'.format(ha),
                    'name:like$:count',
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        else:
            p1 = {
                'paging': False,
                'filter': [
                    'name:like:HNQIS - {} count'.format(ha),
                    'program.name:!like:v1'  # don't get v1 programIndicators
                ],
                'fields': '[id,name]'
            }
        data1 = api.get('programIndicators', params=p1).json()
        pi_uids = [p['id'] for p in data1['programIndicators']]

        # the single target indicator for this health area (full owner fields)
        p2 = {
            'paging': False,
            'filter': ['name:eq:HNQIS - {} count'.format(ha)],
            'fields': ':owner'
        }
        data2 = api.get('indicators', params=p2).json()
        backup_indicators.append(data2['indicators'])

        # programs for this health area, used only for the sanity check below
        if ha == 'VMMC':
            p3 = {
                'paging': False,
                'filter': [
                    'shortName:like: HNQIS {}'.format(ha),
                    'name:!like:v1'
                ],
                'fields': 'id,name'
            }
        else:
            p3 = {
                'paging': False,
                'filter': [
                    'shortName:like: HNQIS {}'.format(ha),  # 2.30 would need to change filters
                    'name:!like:v1'
                ],
                'fields': 'id,name'
            }
        data3 = api.get('programs', params=p3).json()
        no_of_programs = len(data3['programs'])

        # sanity check: expect one 'count' programIndicator per program
        if no_of_programs != len(pi_uids):
            print(u"\033[1mWarning\033[1m\033[0m - number of {} programs ({}) "
                  u"does not match number of 'count' programIndicators ({})!".format(ha, no_of_programs, len(pi_uids)))
            print("\n".join([x['name'] for x in data3['programs']]))

        if len(data2['indicators']) == 1:
            # exactly one target indicator: rebuild its numerator
            i = data2['indicators'][0]
            i['numerator'] = create_numerator(pi_uids)
            container.append(i)
            print(u' \033[1m{}\033[0m - Added {} programIndicators to numerator of indicator "{}"'.format(ha, len(pi_uids), i['name']))
        elif len(data2['indicators']) > 1:
            print(u"\033[1mMore than one indicator found for health area {}\033[0m".format(ha))
        elif len(pi_uids) != 0:
            # programIndicators exist but no indicator to attach them to
            print(u"\033[1mNo indicator found for health area {}\033[0m".format(ha))

    # write the pre-change snapshot before mutating anything server-side
    dump_to_file(backup_indicators)
    indicators['indicators'] = container
    print(u"Posting updated programindicators to \033[1m{}\033[0m...".format(args.server))
    time.sleep(3)  # last chance to Ctrl-C before the server is changed
    api.post('metadata', params={'importMode': 'COMMIT', 'preheatCache': False}, data=indicators)