def write_to_csv(typ, indicators, object_mapping, file_name):
    data = []
    if typ == 'indicators':
        header_row = indicator_fields.keys()
        for indicator in format_indicator(typ, indicators, object_mapping):
            data.append([
                indicator.type, indicator.uid, indicator.name,
                indicator.short_name, indicator.numerator,
                indicator.numerator_description, indicator.denominator,
                indicator.denominator_description, indicator.annualized,
                indicator.indicator_type, indicator.decimals,
                indicator.last_updated
            ])
        write_csv(data, file_name, header_row)
        logger.info("Success! CSV file exported to {}".format(file_name))
    elif typ == 'programIndicators':
        header_row = program_indicator_fields.keys()
        for program_indicator in format_indicator(typ, indicators, object_mapping):
            data.append([
                program_indicator.type, program_indicator.uid,
                program_indicator.name, program_indicator.short_name,
                program_indicator.expression, program_indicator.filter,
                program_indicator.aggregation_type,
                program_indicator.analytics_type, program_indicator.program,
                program_indicator.program_name, program_indicator.last_updated
            ])
        # use the same write_csv helper as the 'indicators' branch
        write_csv(data, file_name, header_row)
        logger.info("Success! CSV file exported to {}".format(file_name))

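# The write_csv helper used above is not defined in this collection. A minimal
# sketch of one plausible implementation, assuming the unicodecsv package (the
# function name and signature are taken from the calls above):
import unicodecsv as csv

def write_csv(data, file_name, header_row):
    """Write rows (a list of lists) to file_name, prefixed with header_row."""
    with open(file_name, 'wb') as f:
        writer = csv.writer(f, encoding='utf-8')
        writer.writerow(header_row)
        writer.writerows(data)
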
def main():
    setup_logger()
    args = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=args.password)
    p = {
        'fields': 'id,name,description,leftSide[expression],rightSide[expression]',
        'paging': False
    }
    data = api.get('validationRules', params=p).json()
    uid_cache = set()
    for i, rule in enumerate(data['validationRules'], 1):
        info_msg = "{}/{} Analyzing Validation Rule '{}' ({})"
        logger.info(info_msg.format(i, len(data['validationRules']),
                                    rule['name'], rule['id']))
        uids_in_expressions = extract_uids(rule)
        for uid in uids_in_expressions:
            if uid not in uid_cache:
                try:
                    api.get('identifiableObjects/{}'.format(uid)).json()
                except APIException as exc:
                    if exc.code == 404:
                        logger.warn("UID in expression not identified: {}".format(uid))
                    else:
                        logger.error(exc)
                else:
                    uid_cache.add(uid)

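# extract_uids is called above but not defined here. A minimal sketch, assuming
# a DHIS2 UID is one letter followed by ten alphanumeric characters and that
# only the left and right side expressions need scanning:
import re

UID_PATTERN = re.compile(r'[A-Za-z][A-Za-z0-9]{10}')

def extract_uids(rule):
    """Return the set of UID-shaped tokens in a rule's expressions."""
    uids = set()
    for side in ('leftSide', 'rightSide'):
        uids.update(UID_PATTERN.findall(rule[side]['expression']))
    return uids
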
def test_setup_logger_default():
    from dhis2 import logger, setup_logger
    setup_logger()
    logger.info("info")
    logger.warning("warn")
    logger.debug("debug")
    logger.error("error")

def format_user(users, ou_map):
    User = namedtuple(
        'User', 'name first_name surname username phone_number '
                'last_login user_groups user_roles org_units dv_org_units')
    logger.info('Exporting {} users...'.format(len(users['users'])))
    for user in users['users']:
        # yield a new namedtuple instance per user instead of mutating the class
        yield User(
            name=u'{}'.format(user['name']),
            first_name=u'{}'.format(user['userCredentials']['userInfo']['firstName']),
            surname=u'{}'.format(user['userCredentials']['userInfo']['surname']),
            username=u'{}'.format(user['userCredentials']['username']),
            phone_number=u'{}'.format(user['userCredentials']['userInfo'].get('phoneNumber', '-')),
            last_login=u'{}'.format(user['userCredentials'].get('lastLogin', '-')),
            user_groups=", ".join([ug['name'] for ug in user['userGroups']]),
            user_roles=", ".join([ur['name'] for ur in user['userCredentials']['userRoles']]),
            org_units=u"\n".join([
                replace_path(ou_map, ou['path'])
                for ou in user['organisationUnits']
            ]),
            dv_org_units=u"\n".join([
                replace_path(ou_map, ou['path'])
                for ou in user['dataViewOrganisationUnits']
            ]))

def check_validation_rules(api):
    p = {
        'fields': 'id,name,description,leftSide[expression],rightSide[expression]',
        'paging': False
    }
    data = api.get('validationRules', params=p).json()
    logger.info("*** CHECKING {} VALIDATION RULES... ***".format(
        len(data['validationRules'])))
    # keep the cache across all rules so each UID is only fetched once
    uid_cache = set()
    for rule in data['validationRules']:
        uids_in_expressions = extract_uids(rule)
        for uid in uids_in_expressions:
            if uid not in uid_cache:
                try:
                    api.get('identifiableObjects/{}'.format(uid)).json()
                except RequestException as exc:
                    if exc.code == 404:
                        logger.warn(
                            "Validation Rule '{}' ({}) - "
                            "UID in expression not identified: {}".format(
                                rule['name'], rule['id'], uid))
                        uid_cache.add(uid)
                    else:
                        logger.error(exc)
                else:
                    uid_cache.add(uid)

def post_to_server(api, jsonObject, apiObject='metadata', strategy='CREATE_AND_UPDATE'):
    try:
        response = api.post(apiObject,
                            params={'mergeMode': 'REPLACE',
                                    'importStrategy': strategy},
                            json=jsonObject)
    except RequestException as e:
        # Log errors returned from DHIS2
        logger.error("metadata update failed with error " + str(e))
        return False
    if response is None:
        logger.error("Error in response from server")
        return False
    text = json.loads(response.text)
    if text['status'] == 'ERROR':
        logger.error("Import failed!\n" + json.dumps(
            text['typeReports'], indent=4, sort_keys=True))
        return False
    if apiObject == 'metadata':
        logger.info("metadata imported " + text['status'] + " " +
                    json.dumps(text['stats']))
    else:
        logger.info("Data imported\n" + json.dumps(text, indent=4, sort_keys=True))
    if text['status'] == 'WARNING':
        logger.warning(text)
    return True

def main():
    # Print DHIS2 Info
    logger.warn("You are running on DHIS2 version {} revision {} - "
                "Last Analytics generation was at: {}".format(
                    api.version, api.revision,
                    api.info.get('lastAnalyticsTableSuccess')))
    # GET dataElements that contain ANC in their name
    params = {'filter': 'name:like:ANC', 'paging': False, 'fields': ':owner'}
    data_elements = api.get('dataElements', params=params).json()
    # Loop through each dataElement
    for de in data_elements['dataElements']:
        # Add (updated) to the name
        de['name'] = '{} (updated)'.format(de['name'])
        try:
            # Replace the dataElement on the server
            api.put('dataElements/{}'.format(de['id']),
                    params={'mergeMode': 'REPLACE'},
                    json=de)
        except RequestException as e:
            # Log errors returned from DHIS2
            logger.error("Updating DE '{}' ({}) failed: {}".format(
                de['name'], de['id'], e))
        else:
            # Log success message
            logger.info("Updated DE '{}' ({}) successfully".format(
                de['name'], de['id']))

def create_api(server=None, username=None, password=None, api_version=None):
    """Return a fully configured dhis2.Dhis instance"""
    if not any([server, username, password]):
        api = Dhis.from_auth_file(api_version=api_version,
                                  user_agent='dhis2-pk/{}'.format(__version__))
        logger.info("Found a file for server {}".format(api.base_url))
        return api
    return Dhis(server, username, password, api_version,
                'dhis2-pk/{}'.format(__version__))

def main():
    args, password = parse_args()
    setup_logger(include_caller=False)
    api = create_api(server=args.server,
                     username=args.username,
                     password=password)
    validate_file(args.css)
    post_file(api, filename=args.css)
    logger.info("{} CSS posted to {}. Clear your Browser cache / use Incognito.".format(
        args.css, api.api_url))

def check_category_combos(api):
    cat_combo = api.get('categoryCombos', params={
        'fields': 'id,name',
        'paging': False
    }).json()
    logger.info("*** CHECKING {} CATEGORY COMBOS... ***".format(
        len(cat_combo['categoryCombos'])))
    for cc in cat_combo['categoryCombos']:
        data_elements_with_cc = api.get('dataElements', params={
            'fields': 'id,name',
            'filter': 'categoryCombo.id:eq:{}'.format(cc['id'])
        }).json()['dataElements']
        data_set_elements_with_cc = api.get('dataElements', params={
            'fields': 'id,name',
            'filter': 'dataSetElements.categoryCombo.id:eq:{}'.format(cc['id'])
        }).json()['dataElements']
        programs_with_cc = api.get('programs', params={
            'fields': 'id,name',
            'filter': 'categoryCombo.id:eq:{}'.format(cc['id'])
        }).json()['programs']
        datasets_with_cc = api.get('dataSets', params={
            'fields': 'id,name',
            'filter': 'categoryCombo.id:eq:{}'.format(cc['id'])
        }).json()['dataSets']
        if not any([data_elements_with_cc, data_set_elements_with_cc,
                    programs_with_cc, datasets_with_cc]):
            logger.warn(
                "Category Combo '{}' ({}) is not assigned "
                "to any Data Element, Data Set Element, Program or Data Set".format(
                    cc['name'], cc['id']))

def test_setup_logger_to_file():
    from dhis2 import logger, setup_logger
    filename = os.path.join(tempfile.gettempdir(), 'logfile.log')
    setup_logger(logfile=filename)
    logger.info("info")
    logger.warning("warn")
    logger.debug("debug")
    logger.error("error")
    assert os.path.isfile(filename)

def create_dummy_attributes(tei):
    new_attributes = list()
    if len(tei['attributes']) > 0:
        gender = choice(['M', 'F'])
        for tea in tei['attributes']:
            tea_uid = tea['attribute']
            new_attributes.append({
                'attribute': tea_uid,
                'value': create_dummy_value(tea_uid, gender)
            })
    logger.info(json.dumps(new_attributes, indent=4))
    # return the generated attributes so callers can attach them to the TEI
    return new_attributes

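# A minimal usage sketch for create_dummy_attributes; the TEI structure and the
# tracked entity attribute UID are hypothetical:
tei = {
    'attributes': [
        {'attribute': 'w75KJ2mc4zz', 'value': ''}  # hypothetical TEA UID
    ]
}
tei['attributes'] = create_dummy_attributes(tei)
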
def check_option_sets(api):
    option_sets = api.get('optionSets', params={
        'fields': 'id,name,options',
        'paging': False
    }).json()
    logger.info("*** CHECKING {} OPTION SETS... ***".format(
        len(option_sets['optionSets'])))
    for o in option_sets['optionSets']:
        if not o.get('options'):
            logger.warn("Option Set '{}' ({}) has no options".format(
                o['name'], o['id']))
    for option_set in option_sets['optionSets']:
        data_elements_with_optionset = api.get('dataElements', params={
            'fields': 'id,name',
            'filter': 'optionSet.id:eq:{}'.format(option_set['id'])
        }).json()['dataElements']
        tea_with_optionset = api.get('trackedEntityAttributes', params={
            'fields': 'id,name',
            'filter': 'optionSet.id:eq:{}'.format(option_set['id'])
        }).json()['trackedEntityAttributes']
        attributes_with_optionset = api.get('attributes', params={
            'fields': 'id,name',
            'filter': 'optionSet.id:eq:{}'.format(option_set['id'])
        }).json()['attributes']
        if not any([data_elements_with_optionset, tea_with_optionset,
                    attributes_with_optionset]):
            logger.warn(
                "Option Set '{}' ({}) is not assigned "
                "to any Data Element, Tracked Entity Attribute or Attribute".format(
                    option_set['name'], option_set['id']))

def post_chunked_data(api_endpoint, data_list, json_key, chunk_max_size):
    number_elems = len(data_list)
    if number_elems <= chunk_max_size:
        # fits into a single request - post it and stop (avoids posting twice)
        post_to_server(api_endpoint, {json_key: data_list}, json_key)
        return
    chunk = dict()
    count = 0
    for x in range(0, number_elems, chunk_max_size):
        # slicing past the end of the list is safe in Python
        chunk[json_key] = data_list[x:x + chunk_max_size]
        count += 1
        retries = 0
        while retries <= 5:
            try:
                response = api_endpoint.post(json_key,
                                             params={
                                                 'mergeMode': 'REPLACE',
                                                 'strategy': 'CREATE_AND_UPDATE'
                                             },
                                             json=chunk)
            except RequestException as e:
                logger.error(str(e))
                time.sleep(3)
                retries += 1
            else:
                text = json.loads(response.text)
                if 'status' in text and text['status'] == 'ERROR':
                    error_report = text['typeReports'][0]['objectReports'][0]['errorReports'][0]
                    logger.error(error_report)
                else:
                    if 'response' in text:
                        for key in ['importSummaries', 'importOptions', 'responseType']:
                            text.pop(key, None)
                        logger.info(json.dumps(text['response'], indent=4, sort_keys=True))
                    logger.info("Operation successful: chunk {} of {} created/updated".format(
                        count, json_key))
                break

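# A minimal usage sketch for post_chunked_data; the server credentials and the
# choice of payload are illustrative only:
api = Api('play.dhis2.org/demo', 'admin', 'district')
owned = api.get('dataElements', params={'fields': ':owner', 'paging': False}).json()
post_chunked_data(api, owned['dataElements'], 'dataElements', chunk_max_size=50)
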
def check_categories(api):
    categories = api.get('categories', params={
        'fields': 'id,name,categoryCombos',
        'paging': False
    }).json()
    logger.info("*** CHECKING {} CATEGORIES... ***".format(
        len(categories['categories'])))
    for c in categories['categories']:
        if not c.get('categoryCombos'):
            logger.warn("Category '{}' ({}) is not in any Category Combo".format(
                c['name'], c['id']))

def main():
    # load the JSON file that sits next to the script
    data = load_json('2_import_metadata.json')
    try:
        # import metadata
        api.post('metadata.json', params={
            'preheatCache': False,
            'strategy': 'CREATE'
        }, json=data)
    except RequestException as e:
        logger.error("Import failed: {}".format(e))
    else:
        logger.info("Import successful!")

def check_category_options(api):
    category_options = api.get('categoryOptions', params={
        'fields': 'id,name,categories',
        'paging': False
    }).json()
    logger.info("*** CHECKING {} CATEGORY OPTIONS... ***".format(
        len(category_options['categoryOptions'])))
    for co in category_options['categoryOptions']:
        if not co.get('categories'):
            logger.warn("Category Option '{}' ({}) is not in any Category".format(
                co['name'], co['id']))

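# A minimal runner that wires the check_* functions above together; the entry
# point and the demo credentials are assumptions:
def run_all_checks():
    setup_logger()
    api = Api('play.dhis2.org/demo', 'admin', 'district')
    check_validation_rules(api)
    check_category_combos(api)
    check_option_sets(api)
    check_categories(api)
    check_category_options(api)
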
def main():
    setup_logger(include_caller=False)
    args = parse_args()
    if args.logging_to_file:
        if args.debug:
            setup_logger(logfile=args.logging_to_file,
                         log_level=DEBUG,
                         include_caller=True)
        else:
            setup_logger(logfile=args.logging_to_file, include_caller=False)
    elif args.debug:
        setup_logger(log_level=DEBUG, include_caller=True)
    api = create_api(server=args.server,
                     username=args.username,
                     password=args.password,
                     api_version=args.api_version)
    validate_args(args, api.version_int)
    public_access = Permission.from_public_args(args.public_access)
    collection = ShareableObjectCollection(api, args.object_type, args.filter)
    usergroups = UserGroupsCollection(api, args.groups)
    validate_data_access(public_access, collection, usergroups, api.version_int)
    logger.info(u"Public access ➜ {}".format(public_access))
    for i, element in enumerate(collection.elements, 1):
        update = ShareableObject(obj_type=element.obj_type,
                                 uid=element.uid,
                                 name=element.name,
                                 code=element.code,
                                 public_access=public_access,
                                 usergroup_accesses=usergroups.accesses)
        pointer = u"{0}/{1} {2} {3}".format(i, len(collection.elements),
                                            collection.name, element.uid)
        if not skip(args.overwrite, element, update):
            logger.info(u"{0} {1}".format(pointer, element.log_identifier))
            share(api, update)
        else:
            logger.warning(u'Not overwriting: {0} {1}'.format(
                pointer, element.log_identifier))

def main():
    setup_logger()
    args, password = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=password)
    params1 = {
        'fields': 'name,'
                  'userCredentials[username,lastLogin,userRoles[name],userInfo[phoneNumber,firstName,surname]],'
                  'organisationUnits[path],userGroups[name],'
                  'dataViewOrganisationUnits[path]',
        'paging': False
    }
    users = api.get(endpoint='users', params=params1).json()
    params2 = {'fields': 'id,name', 'paging': False}
    ou_map = {
        ou['id']: ou['name']
        for ou in api.get(endpoint='organisationUnits',
                          params=params2).json()['organisationUnits']
    }
    file_name = "userinfo-{}.csv".format(file_timestamp(api.api_url))
    data = []
    header_row = [
        'name', 'firstName', 'surname', 'username', 'phoneNumber',
        'lastLogin', 'userGroups', 'userRoles', 'orgunitPaths',
        'dataViewOrgunitPaths'
    ]
    for user in format_user(users, ou_map):
        data.append([
            user.name, user.first_name, user.surname, user.username,
            user.phone_number, user.last_login, user.user_groups,
            user.user_roles, user.org_units, user.dv_org_units
        ])
    write_csv(data, file_name, header_row)
    logger.info("Success! CSV file exported to {}".format(file_name))

def main():
    args = parse_args()
    setup_logger()
    api = Api(server=args.server,
              username=args.username,
              password=args.password)
    p = {
        'paging': False,
        'filter': 'name:like:HNQIS',
        'fields': 'id,name'
    }
    # .json() was missing here - the response must be decoded before indexing
    programs = api.get('programs', params=p).json()
    print("event_date,program,name,event,_OverallScore,0CS-100,diff")
    fix_them = []
    csparams = {
        'filter': ['shortName:like:.0CS-100', 'name:!ilike:_DEL'],
        'paging': False,
        'fields': 'id'
    }
    root_compscores = [x['id'] for x in
                       api.get('dataElements', params=csparams).json()['dataElements']]
    for program in programs['programs']:
        params = {
            'program': program['id'],
            'skipPaging': True,
            'fields': '[*]'
        }
        events = api.get('events', params=params).json()
        for event in events['events']:
            if analyze_event(program, event, root_compscores):
                fix_them.append(event)
    if fix_them and args.fix_values:
        logger.info(u"Fixing those events and resetting _Order Forward...")
        for i, e in enumerate(fix_them, 1):
            fixed = fix_event(e, root_compscores)
            logger.info(u"[{}/{}] Pushing event {}...".format(
                i, len(fix_them), e['event']))
            api.put('events/{}'.format(e['event']), data=fixed)
    else:
        logger.warn(u"Not fixing events")

def __init__(self, api, groups):
    self.api = api
    self.accesses = set()
    if not groups:
        logger.info("No User Groups specified, only setting Public Access.")
    else:
        for group in groups:
            group_filter = group[0]
            permission = Permission.from_group_args(group)
            delimiter, root_junction = set_delimiter(api.version_int, group_filter)
            filter_list = group_filter.split(delimiter)
            usergroups = self.get_usergroup_uids(filter_list, root_junction)
            log_msg = u"User Groups with filter [{}]"
            logger.info(log_msg.format(
                u" {} ".format(root_junction).join(filter_list)))
            for uid, name in iteritems(usergroups):
                logger.info(u"- {} '{}' {} {}".format(uid, name, ARROW, permission))
                self.accesses.add(UserGroupAccess(uid, permission))

def get_objects(self):
    """
    Get the actual objects from DHIS2
    :return: requests response
    """
    params = {
        'fields': 'id,name,code,publicAccess,userGroupAccesses',
        'paging': False
    }
    split = None
    if self.filters:
        split = self.filters.split(self.delimiter)
        params['filter'] = split
    if self.root_junction == 'OR':
        params['rootJunction'] = self.root_junction
    response = self.api.get(self.plural, params=params)
    if response:
        amount = len(response.json()[self.plural])
        if amount > 0:
            name = self.name if amount == 1 else self.plural
            if self.filters:
                print_msg = u"Sharing {} {} with filter [{}]"
                logger.info(print_msg.format(
                    amount, name,
                    " {} ".format(self.root_junction).join(split)))
            else:
                print_msg = u"Sharing *ALL* {} {} (no filters set!). Continuing in 10 seconds..."
                logger.warning(print_msg.format(amount, name))
                time.sleep(10)
            return response
        else:
            logger.warning(u'No {} found - check your filter'.format(self.plural))
            sys.exit(0)

def main():
    setup_logger(include_caller=False)
    args, password = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=password,
                     api_version=args.api_version)
    file_name = '{}-{}.csv'.format(args.indicator_type,
                                   file_timestamp(api.api_url))
    if args.indicator_type == 'indicators':
        fields = ','.join([x for x in indicator_fields.values() if x != 'type'])
    elif args.indicator_type == 'programIndicators':
        fields = ','.join([
            x for x in program_indicator_fields.values()
            if x not in ('type', 'program_name')
        ])
    else:
        raise SystemExit('Cannot process argument -t {}'.format(args.indicator_type))
    indicators = api.get(endpoint=args.indicator_type,
                         params=get_params(args.indicator_filter, fields)).json()
    message = analyze_result(args.indicator_type, indicators, args.indicator_filter)
    logger.info(message)
    logger.info("Analyzing metadata...")
    object_mapping = object_map(api)
    # match write_to_csv's signature: (typ, indicators, object_mapping, file_name)
    write_to_csv(args.indicator_type, indicators, object_mapping, file_name)

def main():
    setup_logger()
    args = parse_args()
    api = create_api(server=args.server,
                     username=args.username,
                     password=args.password)
    # instantiate the namedtuple instead of mutating the class
    Attribute = namedtuple('Attribute', 'uid name')
    attribute = Attribute(uid=args.attribute_uid,
                          name=get_attribute_name(api, args.attribute_uid))
    typ = args.object_type
    attribute_is_on_model(api, attribute, typ)
    data = list(load_csv(args.source_csv))
    validate_csv(data)
    logger.info(u"Updating values for Attribute '{}' ({}) on {} {} ...".format(
        attribute.name, attribute.uid, len(data), typ))
    for i in range(3, 0, -1):
        time.sleep(i)
        print('Proceeding in {}...'.format(i))
    for i, obj in enumerate(data, 1):
        obj_uid = obj['uid']
        attribute_value = obj['attributeValue']
        obj_old = api.get('{}/{}'.format(typ, obj_uid),
                          params={'fields': ':owner'}).json()
        obj_updated = create_or_update_attribute_values(
            obj_old, attribute.uid, attribute_value)
        api.put('{}/{}'.format(typ, obj_uid), data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(
            i, len(data), attribute_value, typ[:-1], obj_uid))

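# create_or_update_attribute_values is called above but not shown. A minimal
# sketch of one plausible implementation: overwrite the value if the attribute
# already exists on the object, otherwise append a new attributeValue entry.
def create_or_update_attribute_values(obj, attribute_uid, attribute_value):
    attribute_values = obj.get('attributeValues', [])
    for av in attribute_values:
        if av['attribute']['id'] == attribute_uid:
            av['value'] = attribute_value
            break
    else:
        attribute_values.append({
            'attribute': {'id': attribute_uid},
            'value': attribute_value
        })
    obj['attributeValues'] = attribute_values
    return obj
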
def main():
    args = parse_args()
    setup_logger()
    api = Api(server=args.server,
              username=args.username,
              password=args.password)
    if not is_valid_uid(args.attribute_uid):
        logger.error("Attribute {} is not a valid UID".format(args.attribute_uid))
    data = list(load_csv(args.source_csv))
    validate_csv(data)
    attr_get = {'fields': 'id,name,{}Attribute'.format(args.object_type[:-1])}
    attr = api.get('attributes/{}'.format(args.attribute_uid), params=attr_get).json()
    if attr['{}Attribute'.format(args.object_type[:-1])] is False:
        logger.error("Attribute {} is not assigned to type {}".format(
            args.attribute_uid, args.object_type[:-1]))
    logger.info(
        "[{}] - Updating Attribute Values for Attribute \033[1m{}\033[0m "
        "for \033[1m{}\033[0m \033[1m{}\033[0m...".format(
            args.server, args.attribute_uid, len(data), args.object_type))
    try:
        time.sleep(3)
    except KeyboardInterrupt:
        logger.warn("\033[1m{}\033[0m".format("Aborted!"))
    for i, obj in enumerate(data, 1):
        obj_uid = obj.get('key')
        attribute_value = obj.get('value')
        params_get = {'fields': ':owner'}
        obj_old = api.get('{}/{}'.format(args.object_type, obj_uid),
                          params=params_get).json()
        obj_updated = create_or_update_attributevalues(
            obj=obj_old,
            attribute_uid=args.attribute_uid,
            attribute_value=attribute_value)
        api.put('{}/{}'.format(args.object_type, obj_uid),
                params=None,
                data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(
            i, len(data), attribute_value, args.object_type[:-1], obj_uid))

def dump_to_file(data):
    ts = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    filename = "healtharea_indicators_backup_{}.json".format(ts)
    with open(filename, 'w') as out:
        json.dump(data, out, indent=4)
    logger.info("Before state backed up to \033[1m{}\033[0m".format(filename))

def main():
    setup_logger(include_caller=False)
    args, password = parse_args()
    if args.logging_to_file:
        if args.debug:
            setup_logger(logfile=args.logging_to_file,
                         log_level=DEBUG,
                         include_caller=True)
        else:
            setup_logger(logfile=args.logging_to_file, include_caller=False)
    elif args.debug:
        setup_logger(log_level=DEBUG, include_caller=True)
    api = create_api(server=args.server,
                     username=args.username,
                     password=password,
                     api_version=args.api_version)
    validate_args(args, api.version_int)
    public_access_permission = Permission.from_public_args(args.public_access)
    collection = ShareableObjectCollection(api, args.object_type, args.filter)
    usergroups = UserGroupsCollection(api, args.groups)
    validate_data_access(public_access_permission, collection, usergroups,
                         api.version_int)
    # sort by name
    try:
        elements = sorted(collection.elements, key=operator.attrgetter('name'))
    except AttributeError:
        elements = collection.elements
    # handle log messages and collection-wide public access and
    # usergroup access if applicable
    if args.extend:
        if not args.public_access:
            logger.warning(u"Public access {} INHERIT".format(ARROW))
        else:
            logger.info(u"Public access {} {}".format(ARROW, public_access_permission))
            public_access = Permission.from_public_args(args.public_access)
        logger.warning(u"Extending with additional User Groups...")
    else:
        logger.info(u"Public access {} {}".format(ARROW, public_access_permission))
        public_access = Permission.from_public_args(args.public_access)
        usergroup_accesses = usergroups.accesses
    time.sleep(2)
    for i, element in enumerate(elements, 1):
        if args.extend:
            # merge user group accesses
            usergroup_accesses = merge(server_uga=element.usergroup_accesses,
                                       local_uga=usergroups.accesses)
            # if public access is not provided via argument,
            # re-use public access from the object on the server
            if not args.public_access:
                public_access = element.public_access
        # no issue for public_access and usergroup_accesses since both are set
        # above with the same if/else check; placed here to improve performance
        # and allow for logical logging message placement
        # noinspection PyUnboundLocalVariable
        update = ShareableObject(obj_type=element.obj_type,
                                 uid=element.uid,
                                 name=element.name,
                                 code=element.code,
                                 public_access=public_access,
                                 usergroup_accesses=usergroup_accesses)
        pointer = u"{0}/{1} {2} {3}".format(i, len(collection.elements),
                                            collection.name, element.uid)
        if not skip(args.overwrite, element, update):
            logger.info(u"{0} {1}".format(pointer, element.log_identifier))
            share(api, update)
        else:
            logger.warning(u'Skipping (already shared): {0} {1}'.format(
                pointer, element.log_identifier))

"\nIf an existing xlsx file is provided, it creates a file _new.xlsx with updated keys and EN strings." "\nEg: --get_dict_from_form=my_file_name") my_parser.add_argument('-post', '--update_form_from_dict', action="store", metavar='file_name', nargs=1, help="Use dictionary in xlsx format to update translations in form" "\nEg: --update_form_from_dict=my_file.xlsx") my_parser.add_argument('-gk', '--generate_keys', action='store_true', help='This optional argument makes sure the keys are regenerated in the html form and the dict') args = my_parser.parse_args() if args.get_dict_from_form is None and args.update_form_from_dict is None: logger.error('Please specify at least one option. Try with -h to check for command line help') exit(1) mode = 'get' if args.get_dict_from_form is not None: logger.info("Creating dictionary") if '.xlsx' not in args.get_dict_from_form: output_file_name = args.get_dict_from_form + '.xlsx' print(output_file_name) elif args.update_form_from_dict is not None: mode = 'post' logger.info("Updating custom forms") input_file_name = args.update_form_from_dict[0] try: xls = pd.ExcelFile(input_file_name) except FileNotFoundError: logger.error('File ' + input_file_name + ' does not exist') if mode == 'get': # Check if file exists, so we can update it update = False