def main():
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    # Every CSV header except 'orgunit' is expected to be a program UID.
    programs_csv = [h.strip() for h in data[0] if h != 'orgunit']
    if not programs_csv:
        raise ValueError('No programs found')

    # Verify that every program UID from the CSV exists on the server.
    params_get = {'fields': 'id', 'paging': False}
    programs_server = [p['id'] for p in api.get('programs', params=params_get).json()['programs']]
    for p in programs_csv:
        if p not in programs_server:
            logger.error(u"Program {0} is not a valid program: {1}/programs/{0}.json".format(p, api.api_url))

    program_orgunit_map = get_program_orgunit_map(data)
    metadata_payload = []
    final = {}
    for program_uid, orgunit_list in iteritems(program_orgunit_map):
        # Fetch the full program, update its org unit assignment and keep a local backup.
        params_get = {'fields': ':owner'}
        program = api.get('programs/{}'.format(program_uid), params=params_get).json()
        updated = set_program_orgunits(program, orgunit_list, args.append_orgunits)
        metadata_payload.append(updated)

        with open('backup_{}.json'.format(program_uid), 'w') as f:
            json.dump(program, f, indent=4)

        print(u"[{}] - Assigning \033[1m{} (total: {})\033[0m "
              u"OrgUnits to Program \033[1m{}\033[0m...".format(
                  args.server, len(orgunit_list), len(program['organisationUnits']), program['name']))

        final['programs'] = [updated]
        params_post = {"mergeMode": "REPLACE", "strategy": "UPDATE"}
        api.post(endpoint='metadata', params=params_post, data=final)
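
# set_program_orgunits() is defined elsewhere in this script; the function below
# is only a minimal sketch of the behaviour main() appears to rely on, assuming
# the ':owner' program export shape used above. The name (suffixed "_sketch")
# and the exact handling of the append flag are assumptions, not the shipped code.
def set_program_orgunits_sketch(program, orgunit_list, append_orgunits):
    """Hypothetical sketch: attach org unit references to a program payload."""
    new_refs = [{'id': uid} for uid in orgunit_list]
    if append_orgunits:
        # keep the current assignment and add only org units not yet present
        current = program.setdefault('organisationUnits', [])
        existing = {ou['id'] for ou in current}
        current.extend(ref for ref in new_refs if ref['id'] not in existing)
    else:
        # replace the assignment entirely
        program['organisationUnits'] = new_refs
    return program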
def main():
    setup_logger()
    args = parse_args()
    api = create_api(server=args.server, username=args.username, password=args.password)

    Attribute = namedtuple('Attribute', 'uid name')
    attribute = Attribute(uid=args.attribute_uid, name=get_attribute_name(api, args.attribute_uid))
    typ = args.object_type

    # Abort early if the attribute is not assignable to this object type.
    attribute_is_on_model(api, attribute, typ)

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    logger.info(u"Updating values for Attribute '{}' ({}) on {} {} ...".format(
        attribute.name, attribute.uid, len(data), typ))

    # Short countdown so the run can still be cancelled with Ctrl-C.
    for i in range(3, 0, -1):
        print('Proceeding in {}...'.format(i))
        time.sleep(i)

    for i, obj in enumerate(data, 1):
        obj_uid = obj['uid']
        attribute_value = obj['attributeValue']
        obj_old = api.get('{}/{}'.format(typ, obj_uid), params={'fields': ':owner'}).json()
        obj_updated = create_or_update_attribute_values(obj_old, attribute.uid, attribute_value)
        api.put('{}/{}'.format(typ, obj_uid), data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(
            i, len(data), attribute_value, typ[:-1], obj_uid))
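
# create_or_update_attribute_values() is imported from the package; the sketch
# below is only an assumption of what it does, not the actual implementation:
# overwrite the value if the object already carries the attribute, otherwise
# append a new attributeValue entry in the standard DHIS2 shape.
def create_or_update_attribute_values_sketch(obj, attribute_uid, attribute_value):
    """Hypothetical sketch: upsert one attributeValue on a metadata object."""
    attribute_values = obj.setdefault('attributeValues', [])
    for av in attribute_values:
        if av.get('attribute', {}).get('id') == attribute_uid:
            # attribute already present on the object: overwrite its value
            av['value'] = attribute_value
            return obj
    # attribute not present yet: append a new attributeValue entry
    attribute_values.append({'attribute': {'id': attribute_uid}, 'value': attribute_value})
    return obj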
def main():
    args = parse_args()
    setup_logger()
    api = Api(server=args.server, username=args.username, password=args.password)

    if not is_valid_uid(args.attribute_uid):
        logger.error("Attribute {} is not a valid UID".format(args.attribute_uid))

    data = list(load_csv(args.source_csv))
    validate_csv(data)

    # Check that the attribute is assigned to the requested object type,
    # e.g. the 'organisationUnitAttribute' flag for object_type 'organisationUnits'.
    attr_get = {'fields': 'id,name,{}Attribute'.format(args.object_type[:-1])}
    attr = api.get('attributes/{}'.format(args.attribute_uid), params=attr_get).json()
    if attr['{}Attribute'.format(args.object_type[:-1])] is False:
        logger.error("Attribute {} is not assigned to type {}".format(args.attribute_uid, args.object_type[:-1]))

    logger.info(
        "[{}] - Updating Attribute Values for Attribute \033[1m{}\033[0m "
        "for \033[1m{}\033[0m \033[1m{}\033[0m...".format(
            args.server, args.attribute_uid, len(data), args.object_type))

    # Short grace period so the run can still be cancelled with Ctrl-C.
    try:
        time.sleep(3)
    except KeyboardInterrupt:
        logger.warning("\033[1m{}\033[0m".format("Aborted!"))
        return

    for i, obj in enumerate(data, 1):
        obj_uid = obj.get('key')
        attribute_value = obj.get('value')
        params_get = {'fields': ':owner'}
        obj_old = api.get('{}/{}'.format(args.object_type, obj_uid), params=params_get).json()
        obj_updated = create_or_update_attributevalues(obj=obj_old, attribute_uid=args.attribute_uid,
                                                       attribute_value=attribute_value)
        api.put('{}/{}'.format(args.object_type, obj_uid), params=None, data=obj_updated)
        logger.info(u"{}/{} - Updated AttributeValue: {} - {}: {}".format(
            i, len(data), attribute_value, args.object_type[:-1], obj_uid))
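
# is_valid_uid() comes from the shared helpers; the check sketched here assumes
# the standard DHIS2 UID format (11 characters, a letter followed by 10
# alphanumerics). It is a hypothetical stand-in, not the library's implementation.
import re


def is_valid_uid_sketch(uid):
    """Hypothetical sketch: True if uid looks like a DHIS2 UID."""
    return bool(re.match(r'^[A-Za-z][A-Za-z0-9]{10}$', str(uid)))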
def test_csv_no_valid_headers():
    f = list(load_csv(path=os.path.join(PATH, 'headers.csv')))
    with pytest.raises(PKClientException):
        validate_csv(f)


def test_csv_duplicate_objects():
    f = list(load_csv(path=os.path.join(PATH, 'duplicates.csv')))
    with pytest.raises(PKClientException):
        validate_csv(f)


def test_csv_file_valid():
    f = list(load_csv(path=os.path.join(PATH, 'valid.csv')))
    assert validate_csv(f)
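
# The tests above only imply what validate_csv() checks; the sketch below is an
# assumed equivalent (raise PKClientException on missing 'uid'/'attributeValue'
# headers or on duplicate uid rows), not the shipped implementation, and the
# expected header names are an assumption taken from the attribute-update script.
def validate_csv_sketch(data):
    """Hypothetical sketch: validate rows produced by load_csv()."""
    if not data or 'uid' not in data[0] or 'attributeValue' not in data[0]:
        raise PKClientException("CSV must have 'uid' and 'attributeValue' headers")
    uids = [row['uid'] for row in data]
    if len(uids) != len(set(uids)):
        raise PKClientException('CSV contains duplicate uid rows')
    return True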