def test_setup_logger_default():
    """Logger configured with defaults accepts every log level without raising."""
    from dhis2 import logger, setup_logger

    setup_logger()
    # Exercise each level once; the messages mirror the level names.
    for level, message in (("info", "info"), ("warning", "warn"),
                           ("debug", "debug"), ("error", "error")):
        getattr(logger, level)(message)
def post_to_server(api, jsonObject, apiObject='metadata', strategy='CREATE_AND_UPDATE'):
    """
    POST a JSON payload to a DHIS2 API endpoint and log the import outcome.

    :param api: DHIS2 API client exposing ``.post(endpoint, params=..., json=...)``
    :param jsonObject: payload (dict) to post
    :param apiObject: endpoint to post to (default ``'metadata'``)
    :param strategy: DHIS2 importStrategy value (default ``'CREATE_AND_UPDATE'``)
    :return: True when the server reports success (including WARNING status),
             False on request failure, missing response, or ERROR status.
             (Fix: error paths previously returned None implicitly; all paths
             now return an explicit bool — backward compatible for truthiness.)
    """
    try:
        response = api.post(apiObject,
                            params={'mergeMode': 'REPLACE', 'importStrategy': strategy},
                            json=jsonObject)
    except RequestException as e:
        # Errors returned from DHIS2
        logger.error("metadata update failed with error " + str(e))
        return False
    if response is None:
        logger.error("Error in response from server")
        return False
    text = json.loads(response.text)
    if text['status'] == 'ERROR':
        logger.error("Import failed!!!!\n" + json.dumps(text['typeReports'], indent=4, sort_keys=True))
        return False
    if apiObject == 'metadata':
        logger.info("metadata imported " + text['status'] + " " + json.dumps(text['stats']))
    else:
        # Non-metadata payloads: dump the whole response for inspection.
        logger.info("Data imported\n" + json.dumps(text, indent=4, sort_keys=True))
    if text['status'] == 'WARNING':
        logger.warning(text)
    return True
def test_setup_logger_to_file():
    """setup_logger(logfile=...) must create the target log file on disk."""
    from dhis2 import logger, setup_logger

    target = os.path.join(tempfile.gettempdir(), 'logfile.log')
    setup_logger(logfile=target)
    # Emit one record per level, then verify the file exists.
    for emit, text in ((logger.info, "info"), (logger.warning, "warn"),
                       (logger.debug, "debug"), (logger.error, "error")):
        emit(text)
    assert os.path.isfile(target)
def main():
    """
    Script entry point: configure logging, parse CLI arguments, connect to a
    DHIS2 server, and apply the requested sharing settings (public access and
    user group accesses) to every object in the selected collection.
    """
    # Default logging first so anything emitted during parsing is visible.
    setup_logger(include_caller=False)
    args = parse_args()
    # Re-configure logging once CLI options are known (file target and/or DEBUG).
    if args.logging_to_file:
        if args.debug:
            setup_logger(logfile=args.logging_to_file, log_level=DEBUG, include_caller=True)
        else:
            setup_logger(logfile=args.logging_to_file, include_caller=False)
    elif args.debug:
        setup_logger(log_level=DEBUG, include_caller=True)
    api = create_api(server=args.server,
                     username=args.username,
                     password=args.password,
                     api_version=args.api_version)
    # Argument validation needs the server version (api.version_int).
    validate_args(args, api.version_int)
    public_access = Permission.from_public_args(args.public_access)
    collection = ShareableObjectCollection(api, args.object_type, args.filter)
    usergroups = UserGroupsCollection(api, args.groups)
    validate_data_access(public_access, collection, usergroups, api.version_int)
    logger.info(u"Public access ➜ {}".format(public_access))
    for i, element in enumerate(collection.elements, 1):
        # Desired sharing state for this object (same settings for all elements).
        update = ShareableObject(obj_type=element.obj_type,
                                 uid=element.uid,
                                 name=element.name,
                                 code=element.code,
                                 public_access=public_access,
                                 usergroup_accesses=usergroups.accesses)
        # Progress marker: "<index>/<total> <collection-name> <uid>".
        pointer = u"{0}/{1} {2} {3}".format(i, len(collection.elements),
                                            collection.name, element.uid)
        if not skip(args.overwrite, element, update):
            logger.info(u"{0} {1}".format(pointer, element.log_identifier))
            share(api, update)
        else:
            logger.warning(u'Not overwriting: {0} {1}'.format(
                pointer, element.log_identifier))
def get_objects(self):
    """
    Get the actual objects from DHIS 2

    Exits the process (sys.exit(0)) when the filter matches nothing; falls
    through (returning None) when the HTTP response itself is falsy.

    :return: requests response
    """
    params = {
        'fields': 'id,name,code,publicAccess,userGroupAccesses',
        'paging': False
    }
    split = None
    if self.filters:
        # Filters arrive as one delimited string; the API expects a list.
        split = self.filters.split(self.delimiter)
        params['filter'] = split
        # NOTE(review): rootJunction placed inside the filters branch — it is
        # only meaningful when filters are present; indentation reconstructed,
        # confirm against the original file.
        if self.root_junction == 'OR':
            params['rootJunction'] = self.root_junction
    response = self.api.get(self.plural, params=params)
    if response:
        amount = len(response.json()[self.plural])
        if amount > 0:
            # Singular vs plural label purely for the log message.
            if amount == 1:
                name = self.name
            else:
                name = self.plural
            if self.filters:
                print_msg = u"Sharing {} {} with filter [{}]"
                logger.info(
                    print_msg.format(
                        amount, name,
                        " {} ".format(self.root_junction).join(split)))
            else:
                # No filter set: warn and give the operator 10 seconds to abort.
                print_msg = u"Sharing *ALL* {} {} (no filters set!). Continuing in 10 seconds..."
                logger.warning(print_msg.format(amount, name))
                time.sleep(10)
            return response
        else:
            logger.warning(u'No {} found - check your filter'.format(
                self.plural))
            sys.exit(0)
def main():
    """
    Script entry point (extended sharing variant): configure logging, parse CLI
    arguments, connect to DHIS2, then share each object in the collection —
    either overwriting sharing settings outright or, with ``--extend``, merging
    the server's existing user group accesses with the requested ones and
    optionally inheriting each object's current public access.
    """
    # Default logging first; reconfigured below once CLI options are known.
    setup_logger(include_caller=False)
    args, password = parse_args()
    if args.logging_to_file:
        if args.debug:
            setup_logger(logfile=args.logging_to_file, log_level=DEBUG, include_caller=True)
        else:
            setup_logger(logfile=args.logging_to_file, include_caller=False)
    elif args.debug:
        setup_logger(log_level=DEBUG, include_caller=True)
    api = create_api(server=args.server,
                     username=args.username,
                     password=password,
                     api_version=args.api_version)
    validate_args(args, api.version_int)
    public_access_permission = Permission.from_public_args(args.public_access)
    collection = ShareableObjectCollection(api, args.object_type, args.filter)
    usergroups = UserGroupsCollection(api, args.groups)
    validate_data_access(public_access_permission, collection, usergroups, api.version_int)

    # sort by name (fall back to server order if elements lack a 'name' attribute)
    try:
        elements = sorted(collection.elements, key=operator.attrgetter('name'))
    except AttributeError:
        elements = collection.elements

    # handle log messages and collection-wide public access and usergroup access if applicable
    if args.extend:
        if not args.public_access:
            # No public access given: each object keeps its own (set per-element below).
            logger.warning(u"Public access {} INHERIT".format(ARROW))
        else:
            logger.info(u"Public access {} {}".format(
                ARROW, public_access_permission))
            public_access = Permission.from_public_args(args.public_access)
        logger.warning(u"Extending with additional User Groups...")
    else:
        logger.info(u"Public access {} {}".format(ARROW, public_access_permission))
        public_access = Permission.from_public_args(args.public_access)
        usergroup_accesses = usergroups.accesses
    # Brief pause so the operator can read the summary before work starts.
    time.sleep(2)
    for i, element in enumerate(elements, 1):
        if args.extend:
            # merge user group accesses
            usergroup_accesses = merge(server_uga=element.usergroup_accesses,
                                       local_uga=usergroups.accesses)
            # if public access is not provided via argument, re-use public access from object on server
            if not args.public_access:
                public_access = element.public_access
        # no issue for public_access and usergroup_accesses since it's set above with same if/else check
        # to improve performance and allow for logical logging message placement
        # noinspection PyUnboundLocalVariable
        update = ShareableObject(obj_type=element.obj_type,
                                 uid=element.uid,
                                 name=element.name,
                                 code=element.code,
                                 public_access=public_access,
                                 usergroup_accesses=usergroup_accesses)
        # Progress marker: "<index>/<total> <collection-name> <uid>".
        pointer = u"{0}/{1} {2} {3}".format(i, len(collection.elements),
                                            collection.name, element.uid)
        if not skip(args.overwrite, element, update):
            logger.info(u"{0} {1}".format(pointer, element.log_identifier))
            share(api, update)
        else:
            logger.warning(u'Skipping (already shared): {0} {1}'.format(
                pointer, element.log_identifier))
# to improve performance and allow for logical logging message placement # noinspection PyUnboundLocalVariable update = ShareableObject(obj_type=element.obj_type, uid=element.uid, name=element.name, code=element.code, public_access=public_access, usergroup_accesses=usergroup_accesses) pointer = u"{0}/{1} {2} {3}".format(i, len(collection.elements), collection.name, element.uid) if not skip(args.overwrite, element, update): logger.info(u"{0} {1}".format(pointer, element.log_identifier)) share(api, update) else: logger.warning(u'Skipping (already shared): {0} {1}'.format( pointer, element.log_identifier)) if __name__ == "__main__": try: main() except KeyboardInterrupt: logger.warning("Aborted.") except PKClientException as e: logger.error(e) except Exception as e: logger.exception(e)
"?": "#63", "≤": "#le", "≥": "#ge", "|": "#124" } # Max length = 65 # For data entry forms we use a key key="dataEntryForm_bla_bla" # For Reports we will need to use a key key="htmlReport_bla_bla" return 'dataEntryForm_' + "".join(html_escape_table.get(c, c) for c in name.string.lower().strip().replace(" ", "_"))[:65] if __name__ == '__main__': logger.warning("Server source running DHIS2 version {} revision {}".format(api.version, api.revision)) import argparse my_parser = argparse.ArgumentParser(prog='translate_form', description='Manage translations for custom forms', epilog="It connects to instance using auth.json file and" "\n - Creates an excel with key - en translations for the custom forms present" "\n - Updates the custom form with the translations provided in the excel" "\nLegend for some warnings/errors:" "\n - Pair key / EN translations already in dictionary -> The EN string is used multiple times in the custom form but only one entry will be created in the dictionary", formatter_class=argparse.RawDescriptionHelpFormatter) my_parser.add_argument('-get', '--get_dict_from_form', action="store", metavar='file_name', const='dictionary_' + generate_uid(), nargs='?', help="Create dictionary of translatable string from custom forms in the instance." "\nOptionally, you can pass the name of the output file to create."
def main():
    """
    Dashboard checker entry point: for each configured DHIS2 instance, walk
    every dashboard (optionally filtered by name prefix or a comma-separated
    UID list), fetch each dashboard item and its analytics data, record broken
    items / items with no data into a per-instance CSV, and return the number
    of hard errors found.

    :return: count of items that raised a request error (int)
    """
    my_parser = argparse.ArgumentParser(description='dashboard_checker')
    my_parser.add_argument('-i', '--instance', action="store", dest="instance", type=str,
                           help='URL of the instance to process')
    my_parser.add_argument('-df', '--dashboard_filter', action="store", dest="dashboard_filter", type=str,
                           help='Either a prefix or a list of comma separated UIDs')
    # Paired flags toggling the same dest; default True via set_defaults below.
    my_parser.add_argument('--no_data_warning', dest='no_data_warning', action='store_true')
    my_parser.add_argument('--omit-no_data_warning', dest='no_data_warning', action='store_false')
    my_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true')
    my_parser.set_defaults(no_data_warning=True)
    my_parser.set_defaults(verbose=False)
    args = my_parser.parse_args()

    if args.instance is not None:
        # Derive a file-safe instance name from the last URL segment.
        instances = [{
            'name': args.instance.split('/')[-1].replace(':', '_'),
            'url': args.instance
        }]
    else:
        # Hard-coded fallback instance list used when no URL is given.
        instances = [
            #{'name':'newdemos', 'url':'https://who-demos.dhis2.org/newdemos', 'SQL_view_TRK':'xfemQFHUTUV', 'SQL_view_AGG':'lg8lFbDMw2Z'}
            #{'name':'tracker_dev', 'url': 'https://who-dev.dhis2.org/tracker_dev', 'SQL_view_TRK': 'xfemQFHUTUV', 'SQL_view_AGG': 'lg8lFbDMw2Z'}
            {
                'name': 'covid-19',
                'url': 'https://demos.dhis2.org/covid-19',
                'SQL_view_TRK': 'xfemQFHUTUV',
                'SQL_view_AGG': 'lg8lFbDMw2Z'
            }
        ]

    log_file = "./dashboard_checker.log"
    setup_logger(log_file)

    credentials_file = './auth.json'

    # Accumulates one row per dashboard item that has an issue.
    df = pd.DataFrame({}, columns=[
        'dashboard_name', 'type', 'uid', 'name', 'issue', 'api_link', 'app_link'
    ])

    errors_found = 0

    for instance in instances:
        try:
            f = open(credentials_file)
        except IOError:
            print(
                "Please provide file auth.json with credentials for DHIS2 server"
            )
            exit(1)
        else:
            with open(credentials_file, 'r') as json_file:
                credentials = json.load(json_file)
            api_source = Api(instance['url'], credentials['dhis']['username'],
                             credentials['dhis']['password'])

        # Get dashboards
        params = {"fields": "*", "paging": "false"}
        if args.dashboard_filter is not None:
            item_list = args.dashboard_filter.split(',')
            # A single non-UID token is treated as a name prefix filter.
            if len(item_list) == 1 and not is_valid_uid(item_list[0]):
                params["filter"] = "name:$like:" + args.dashboard_filter
            # Let's consider it as a list of uids
            else:
                # Validate the list
                for item in item_list:
                    if not is_valid_uid(item):
                        logger.error("UID " + item + " is not a valid DHIS2 UID")
                        exit(1)
                params["filter"] = "id:in:[" + args.dashboard_filter + "]"
        dashboards = api_source.get('dashboards', params=params).json()['dashboards']

        dashboard_item_with_issues_row = dict()
        for dashboard in dashboards:
            logger.info('Processing dashboard ' + dashboard['name'])
            dashboard_item_with_issues_row['dashboard_name'] = dashboard['name']
            # Item type names changed across DHIS2 versions (visualization
            # replaced chart/reportTable after 2.33).
            if '2.33' not in api_source.version:
                dashboard_items = [
                    'visualization', 'eventReport', 'eventChart', 'map'
                ]
            else:
                dashboard_items = [
                    'chart', 'reportTable', 'eventReport', 'eventChart', 'map'
                ]
            for dashboardItem in dashboard['dashboardItems']:
                # The dashboard item could be of type TEXT, for example
                # in this case there is nothing to do
                dashboard_item_type_found = False
                for dashboard_item in dashboard_items:
                    if dashboard_item in dashboardItem:
                        dashboard_item_type_found = True
                        dashboard_item_with_issues_row['issue'] = ""
                        dashboard_item_with_issues_row['type'] = dashboard_item
                        dashboard_item_with_issues_row['uid'] = dashboardItem[dashboard_item]['id']
                        dashboard_item_with_issues_row['name'] = ""
                        if args.verbose:
                            logger.info('Trying ' + dashboard_item + ' ' +
                                        dashboardItem[dashboard_item]['id'])
                        # First check: can the item itself be fetched?
                        try:
                            api_endpoint = dashboard_item + 's/' + dashboardItem[dashboard_item]['id']
                            dashboard_item_with_issues_row['api_link'] = instance['url'] + '/api/' + api_endpoint
                            item = api_source.get(api_endpoint, params={
                                "fields": "*"
                            }).json()
                        except RequestException as e:
                            logger.error(dashboard_item + ' ' +
                                         dashboardItem[dashboard_item]['id'] +
                                         " BROKEN with error " + str(e))
                            dashboard_item_with_issues_row['issue'] = str(e)
                            errors_found += 1
                        else:
                            dashboard_item_with_issues_row['name'] = item['name']
                            # eventReport/eventChart: no analytics check performed.
                            if dashboard_item in ['eventReport', 'eventChart']:
                                continue
                            # Try to get the data
                            try:
                                if dashboard_item == 'map':
                                    # Maps: check each layer (mapView) separately.
                                    for map_view in item['mapViews']:
                                        params = build_analytics_payload(map_view, args.verbose)
                                        if params != {}:
                                            if 'layer' in map_view and map_view['layer'] == 'event' and 'program' in map_view:
                                                data = api_source.get(
                                                    'analytics/events/query/' + map_view['program']['id'],
                                                    params=params).json()
                                            else:
                                                data = api_source.get('analytics', params=params).json()
                                else:
                                    data = api_source.get(
                                        'analytics',
                                        params=build_analytics_payload(item, args.verbose)).json()
                            except RequestException as e:
                                logger.error(dashboard_item + ' ' +
                                             dashboardItem[dashboard_item]['id'] +
                                             " data cannot be retrieved with error " + str(e))
                                dashboard_item_with_issues_row['issue'] = str(e)
                                errors_found += 1
                            else:
                                # print(data['rows'])
                                # NOTE(review): 'data' may be unbound here if a
                                # map had only empty payloads — confirm upstream.
                                if args.no_data_warning and (
                                        'rows' not in data or len(data['rows']) == 0):
                                    dashboard_item_with_issues_row['issue'] = 'NO DATA'
                                    logger.warning(dashboardItem[dashboard_item]['id'] + ': NO DATA!!!')
                #exit(0)
                if dashboard_item_type_found and dashboard_item_with_issues_row['issue'] != "":
                    # Build a deep link into the relevant DHIS2 web app.
                    if dashboard_item_with_issues_row['type'] == 'visualization':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                            '/dhis-web-data-visualizer/index.html#/' + \
                            dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row['type'] == 'map':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                            '/dhis-web-maps/index.html'
                    elif dashboard_item_with_issues_row['type'] == 'eventReport':
                        # NOTE(review): missing leading '/' before the app path here,
                        # unlike the other branches — possibly a bug; confirm.
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                            'dhis-web-event-reports/index.html?id=' + \
                            dashboard_item_with_issues_row['uid']
                    elif dashboard_item_with_issues_row['type'] == 'eventChart':
                        dashboard_item_with_issues_row['app_link'] = instance['url'] + \
                            '/dhis-web-event-visualizer/index.html?id=' + \
                            dashboard_item_with_issues_row['uid']
                    # NOTE(review): DataFrame.append is deprecated and removed in
                    # pandas >= 2.0 — migrate to pd.concat when upgrading.
                    df = df.append(dashboard_item_with_issues_row, ignore_index=True)

        # One CSV per instance, named after the instance.
        export_csv = df.to_csv(instance['name'] + '.csv', index=None, header=True)

    # Release log handlers
    handlers = logger.handlers[:]
    for handler in handlers:
        handler.close()
        logger.removeHandler(handler)

    return errors_found
def choices_with_ratio(values, ratios, k):
    """
    Generate k values distributed according to the given ratios.

    :param values: list of values to use; each entry is either a plain value or
                   an interval "min:max" (int, float, or YYYY-MM-DD dates,
                   where max may be the literal 'today')
    :param ratios: list of ratios, same length as values; NaN/empty treated as 0
    :param k: number of values to generate
    :return: shuffled list of k generated values
    """
    # Make sure ratio is not Nan or empty string
    ratios = [x if not isnull(x) and x != "" else float(0) for x in ratios]
    # Per-element counts obtained by rounding ratio * k.
    rationed_number = [int(round(x * k)) for x in ratios]
    # The total number of values which will be generated applying the ratios and rounding the result
    total_generated = sum(rationed_number)
    if len(ratios) > k or total_generated > (k + k / 2):
        logger.warning(
            'The number of values to generate is too small for the high amount of ratios provided'
        )
    if total_generated != k:
        # Find the ratios to correct
        # The idea is that if we have generated less than the total we will randomly increase the elements
        # having the highest ratio (to get more of what we should have more). Otherwise we will decrease
        # the elements with lowest ratio (to get less of what we should have less)
        if total_generated < k:
            ratios_to_correct = max(ratios)
        else:
            ratios_to_correct = min(ratios)
            # NOTE(review): zero-handling nested under the "too many" branch —
            # indentation reconstructed; confirm against the original file.
            if ratios_to_correct == float(0):
                #Remove them from ratios
                tmp_ratios = [
                    ratios[i] for i in range(len(ratios))
                    if ratios[i] != ratios_to_correct
                ]
                ratios_to_correct = min(tmp_ratios)
        # Index returns the first occurrence
        # highest_ratio_index = ratios.index(highest_ratio)
        # Find all occurrences
        indices = [
            i for i in range(len(ratios)) if ratios[i] == ratios_to_correct
        ]
        number_of_iterations = 0
        # Randomly nudge counts up/down until they sum exactly to k.
        while total_generated != k:
            if total_generated < k:
                # Add 1 to element with highest ratio
                rationed_number[choice(indices)] += 1
            elif total_generated > k:
                # Subtract 1 to element with highest ratio
                choosen_random_index = choice(indices)
                if rationed_number[choosen_random_index] > 0:
                    rationed_number[choosen_random_index] -= 1
                else:
                    # Take it out from ratios and recalculate
                    ratios[choosen_random_index] = 0.0
                    minimum_ratio_not_zero = 1.0
                    indices = list()
                    for r in ratios:
                        if r < minimum_ratio_not_zero and r != 0.0:
                            minimum_ratio_not_zero = r
                    indices = [
                        i for i in range(len(ratios))
                        if ratios[i] == minimum_ratio_not_zero
                    ]
            total_generated = sum(rationed_number)
            number_of_iterations += 1
            # We should not spend here too much time, otherwise it is worth resetting the indexes
            # if number_of_iterations == 25:
            #     indices = [i for i in range(len(ratios)) if ratios[i] <= (ratios_to_correct+0.1)]
            #     number_of_iterations = 0

    # Create list of values to return
    choices = list()
    if ':' not in values[0]:
        # Plain values: repeat each value by its computed count.
        for i in range(0, len(values)):
            choices.extend([values[i]] * rationed_number[i])
    else:
        # Interval values "min:max": draw counts randomly from the interval.
        for i in range(0, len(values)):
            min_max_values = str(values[i]).split(":")
            if len(min_max_values) == 2:
                if isInt(min_max_values[0]) and isInt(min_max_values[1]):
                    min_value = int(min_max_values[0])
                    max_value = int(min_max_values[1])
                    if min_value < max_value:
                        choices.extend(
                            numpy.random.randint(min_value, max_value,
                                                 rationed_number[i]))
                    else:
                        logger.error('min value ' + str(min_value) +
                                     ' is greater than max value ' +
                                     str(max_value))
                elif isFloat(min_max_values[0]) and isFloat(min_max_values[1]):
                    min_value = float(min_max_values[0])
                    max_value = float(min_max_values[1])
                    if min_value < max_value:
                        choices.extend(
                            numpy.random.uniform(min_value, max_value,
                                                 rationed_number[i]))
                    else:
                        logger.error('min value ' + str(min_value) +
                                     ' is greater than max value ' +
                                     str(max_value))
                elif isDateFormat(min_max_values[0]) and (isDateFormat(
                        min_max_values[1]) or min_max_values[1] == 'today'):
                    min_date = datetime.strptime(min_max_values[0],
                                                 '%Y-%m-%d').date()
                    if min_max_values[1] == 'today':
                        max_date = date.today()
                    else:
                        max_date = datetime.strptime(min_max_values[1],
                                                     '%Y-%m-%d').date()
                    if min_date < max_date:
                        days_between_dates = (max_date - min_date).days
                        random_days = numpy.random.randint(
                            0, days_between_dates, rationed_number[i])
                        # For the moment, return date type
                        choices.extend(
                            list(
                                map(
                                    lambda x:
                                    (min_date + timedelta(days=int(x))),
                                    random_days)))
                    else:
                        logger.error('min date ' + min_max_values[0] +
                                     ' is greater than max date ' +
                                     min_max_values[1])
                else:
                    logger.error('Could not recognize value type for ' +
                                 min_max_values)
    shuffle(choices)
    return choices
def main():
    """
    Delete-TEIs entry point: validate a program UID (and optional org unit),
    fetch all tracked entity instances for that program under the org unit
    subtree, and delete those stored by the configured user.
    """
    logger.warning("Server source running DHIS2 version {} revision {}".format(
        api.version, api.revision))
    import argparse
    my_parser = argparse.ArgumentParser(
        prog='delete_TEIs',
        description='Delete all TEIs created by robot',
        epilog="",
        formatter_class=argparse.RawDescriptionHelpFormatter)
    my_parser.add_argument('Program_UID',
                           metavar='program_uid',
                           type=str,
                           help='the uid of the program to use')
    my_parser.add_argument(
        '-ou', '--org_unit',
        action="store",
        dest="OrgUnit",
        type=str,
        help=
        'Rather than deleting from the root of the tree, deletes from a specific orgUnit including descendants'
        'Eg: --ou=Q7RbNZcHrQ9')
    args = my_parser.parse_args()

    program_uid = args.Program_UID
    if not is_valid_uid(program_uid):
        logger.error('The program uid specified is not a valid DHIS2 uid')
        exit(1)
    else:
        # Confirm the program exists on the server (404 -> abort).
        try:
            program = api.get('programs/' + program_uid).json()
        except RequestException as e:
            if e.code == 404:
                logger.error('Program ' + program_uid + ' specified does not exist')
                exit(1)

    ou = 'GD7TowwI46c'  # Trainingland
    if args.OrgUnit is not None:
        if not is_valid_uid(args.OrgUnit):
            logger.error('The orgunit uid specified is not a valid DHIS2 uid')
            exit(1)
        else:
            try:
                orgunit = api.get('organisationUnits/' + args.OrgUnit).json()
            except RequestException as e:
                if e.code == 404:
                    logger.error('Org Unit ' + args.OrgUnit + ' specified does not exist')
                    exit(1)
            else:
                # NOTE(review): indexes the JSON response with [0] — confirm the
                # intended payload shape; a dict response would raise KeyError.
                ou = orgunit[0]

    params = {
        'ou': ou,
        'ouMode': 'DESCENDANTS',
        'program': program_uid,
        'skipPaging': 'true',
        #'lastUpdatedDuration': '4d',
        #'fields': '*'
        'fields': 'trackedEntityInstance,enrollments'
    }
    data = api.get('trackedEntityInstances',
                   params=params).json()['trackedEntityInstances']
    logger.info("Found " + str(len(data)) + " TEIs")

    user = '******'
    for tei in data:
        # #### Uncomment this to filter by user
        if 'enrollments' not in tei:
            import json
            logger.info(json.dumps(tei, indent=4))
        # Only delete TEIs stored by the configured user; skip the rest.
        if tei["enrollments"][0]["storedBy"] != user:
            logger.warning("Skipping tei stored by " +
                           tei["enrollments"][0]["storedBy"])
            continue
        # ####
        tei_uid = tei['trackedEntityInstance']
        try:
            response = api.delete('trackedEntityInstances/' + tei_uid)
        except RequestException as e:
            # Best-effort: log the failure and keep deleting the rest.
            logger.error(e)
            pass
        else:
            logger.info("TEI " + tei_uid + " removed")