def test_x_descr_oid(topology):
    """Test import of an attribute using descr-oid format that starts with
    an X-. This should "fail" with a descriptive error message.

    :id: 9308bdbd-363c-45a9-8223-9a6c925dba37
    :setup: Standalone instance
    :steps:
        1. Add invalid x-attribute
        2. Add valid x-attribute
        3. Add invalid x-object
        4. Add valid x-object
    :expectedresults:
        1. raises INVALID_SYNTAX
        2. success
        3. raises INVALID_SYNTAX
        4. success
    """
    inst = topology.standalone
    schema = Schema(inst)

    with pytest.raises(ldap.INVALID_SYNTAX):
        schema.add('attributeTypes',
                   "( x-attribute-oid NAME 'x-attribute' DESC 'desc' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 X-ORIGIN 'user defined' )")
    schema.add('attributeTypes',
               "( 1.2.3.4.5.6.7.8.9.10 NAME 'x-attribute' DESC 'desc' EQUALITY caseIgnoreIA5Match SYNTAX 1.3.6.1.4.1.1466.115.121.1.26 X-ORIGIN 'user defined' )")

    with pytest.raises(ldap.INVALID_SYNTAX):
        schema.add('objectClasses',
                   "( x-object-oid NAME 'x-object' DESC 'desc' SUP TOP AUXILIARY MAY ( x-attribute ) X-ORIGIN 'user defined' )")
    schema.add('objectClasses',
               "( 1.2.3.4.5.6.7.8.9.11 NAME 'x-object' DESC 'desc' SUP TOP AUXILIARY MAY ( x-attribute ) X-ORIGIN 'user defined' )")
def test_schema_reload(topology):
    """Test that the appropriate task entry is created when reloading schema."""
    schema = Schema(topology.instance)
    task = schema.reload()
    assert task.exists()
    task.wait()
    assert task.get_exit_code() == 0
def list_all(inst, basedn, log, args):
    log = log.getChild('list_all')
    schema = Schema(inst)
    json = False
    if args is not None and args.json:
        json = True
    objectclass_elems = schema.get_objectclasses(json=json)
    attributetype_elems = schema.get_attributetypes(json=json)
    matchingrule_elems = schema.get_matchingrules(json=json)

    if json:
        print(dump_json({'type': 'schema',
                         'objectclasses': objectclass_elems,
                         'attributetypes': attributetype_elems,
                         'matchingrules': matchingrule_elems}, indent=4))
    else:
        separator_line = "".join(["-" for _ in range(50)])
        print("Objectclasses:\n", separator_line)
        for oc in objectclass_elems:
            print(oc)
        print("AttributeTypes:\n", separator_line)
        for at in attributetype_elems:
            print(at)
        print("MatchingRules:\n", separator_line)
        for mr in matchingrule_elems:
            print(mr)
def _create_schema(request, topo):
    Schema(topo.standalone).\
        add('attributetypes',
            ["( NAME 'testUserAccountControl' DESC 'Attribute Bitwise filteri-Multi-Valued'"
             "SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )",
             "( NAME 'testUserStatus' DESC 'State of User account active/disabled'"
             "SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )"])

    Schema(topo.standalone).\
        add('objectClasses',
            "( NAME 'testperson' SUP top STRUCTURAL MUST "
            "( sn $ cn $ testUserAccountControl $ "
            "testUserStatus )MAY( userPassword $ telephoneNumber $ "
            "seeAlso $ description ) X-ORIGIN 'BitWise' )")

    # Creating Backend
    backends = Backends(topo.standalone)
    backend = backends.create(properties={'nsslapd-suffix': SUFFIX, 'cn': 'AnujRoot'})

    # Creating suffix
    suffix = Domain(topo.standalone, SUFFIX).create(properties={'dc': 'anuj'})

    # Creating users
    users = UserAccounts(topo.standalone, suffix.dn, rdn=None)
    for user in [('btestuser1', ['514'], ['Disabled'], 100),
                 ('btestuser2', ['65536'], ['PasswordNeverExpired'], 101),
                 ('btestuser3', ['8388608'], ['PasswordExpired'], 102),
                 ('btestuser4', ['256'], ['TempDuplicateAccount'], 103),
                 ('btestuser5', ['16777216'], ['TrustedAuthDelegation'], 104),
                 ('btestuser6', ['528'], ['AccountLocked'], 105),
                 ('btestuser7', ['513'], ['AccountActive'], 106),
                 ('btestuser11', ['655236'], ['TestStatus1'], 107),
                 ('btestuser12', ['665522'], ['TestStatus2'], 108),
                 ('btestuser13', ['266552'], ['TestStatus3'], 109),
                 ('btestuser8', ['98536', '99512', '99528'],
                  ['AccountActive', 'PasswordExxpired', 'AccountLocked'], 110),
                 ('btestuser9', ['87536', '912'],
                  ['AccountActive', 'PasswordNeverExpired'], 111),
                 ('btestuser10', ['89536', '97546', '96579'],
                  ['TestVerify1', 'TestVerify2', 'TestVerify3'], 112)]:
        CreateUsers(users, user[0], user[1], user[2], user[3]).user_create()

    def fin():
        """Deletes entries after the test."""
        for user in users.list():
            user.delete()
        suffix.delete()
        backend.delete()

    request.addfinalizer(fin)
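# "CreateUsers" is a small helper class defined elsewhere in this test module and is
# not shown in this section. A minimal, hypothetical sketch of the idea (assumed, not
# the exact upstream implementation): create a user through the UserAccounts
# collection and attach the bitwise test attributes allowed by the 'testperson'
# objectclass added above.
class CreateUsersSketch(object):
    """Create one test user carrying testUserAccountControl/testUserStatus values."""

    def __init__(self, users, name, account_control, status, uid_number):
        self.users = users
        self.name = name
        self.account_control = account_control
        self.status = status
        self.uid_number = uid_number

    def user_create(self):
        self.users.create(properties={
            'uid': self.name,
            'cn': self.name,
            'sn': self.name,
            'uidNumber': str(self.uid_number),
            'gidNumber': str(self.uid_number),
            'homeDirectory': '/home/{}'.format(self.name),
            # Assumption: extra properties and an explicit objectclass list can be
            # passed at creation time so the custom attributes pass schema checking.
            'objectclass': ['top', 'person', 'organizationalPerson',
                            'inetOrgPerson', 'posixAccount', 'testperson'],
            'testUserAccountControl': self.account_control,
            'testUserStatus': self.status,
        })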
def test_valid(topo, validate_syntax_off):
    """Test syntax-validate task with valid entries

    :id: ec402a5b-bfb1-494d-b751-71b0d31a4d83
    :setup: Standalone instance
    :steps:
        1. Set nsslapd-syntaxcheck to off
        2. Clean error log
        3. Run syntax validate task
        4. Assert that there are no errors in the error log
        5. Set nsslapd-syntaxcheck to on
    :expectedresults:
        1. It should succeed
        2. It should succeed
        3. It should succeed
        4. It should succeed
        5. It should succeed
    """
    inst = topo.standalone

    log.info('Clean the error log')
    inst.deleteErrorLogs()

    schema = Schema(inst)
    log.info('Attempting to add task entry...')
    validate_task = schema.validate_syntax(DEFAULT_SUFFIX)
    validate_task.wait()
    exitcode = validate_task.get_exit_code()
    assert exitcode == 0

    error_lines = inst.ds_error_log.match('.*Found 0 invalid entries.*')
    assert len(error_lines) == 1
    log.info('Found 0 invalid entries - Success')
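# test_valid() above and test_invalid_uidnumber() further below take a
# "validate_syntax_off" fixture that is not shown in this section. A minimal sketch
# of what such a fixture might look like, assuming the standard lib389 config API
# (hypothetical, not the actual fixture): disable nsslapd-syntaxcheck for the test
# and restore it afterwards.
@pytest.fixture(scope="function")
def validate_syntax_off(topo, request):
    topo.standalone.config.set('nsslapd-syntaxcheck', 'off')

    def fin():
        topo.standalone.config.set('nsslapd-syntaxcheck', 'on')

    request.addfinalizer(fin)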
def _gen_schema_plan(self):
    if self.olschema is None:
        return
    # Get the server schema so that we can query it repeatedly.
    schema = Schema(self.inst)
    schema_attrs = schema.get_attributetypes()
    schema_objects = schema.get_objectclasses()

    resolver = Resolver(schema_attrs)

    # Examine schema attrs
    for attr in self.olschema.attrs:
        # If we have been instructed to ignore this oid, skip.
        if attr.oid in self._schema_oid_do_not_migrate:
            continue
        if attr.oid in self._schema_oid_unsupported:
            self.plan.append(SchemaAttributeUnsupported(attr))
            continue
        # For the attr, find if anything has a name overlap in any capacity.
        # overlaps = [ (names, ds_attr) for (names, ds_attr) in schema_attr_names if len(names.intersection(attr.name_set)) > 0]
        overlaps = [ds_attr for ds_attr in schema_attrs if ds_attr.oid == attr.oid]
        if len(overlaps) == 0:
            # We need to add attr
            self.plan.append(SchemaAttributeCreate(attr))
        elif len(overlaps) == 1:
            # We need to possibly adjust attr
            ds_attr = overlaps[0]
            # We need to have a way to compare the two.
            if attr.inconsistent(ds_attr):
                self.plan.append(SchemaAttributeInconsistent(attr, ds_attr))
        else:
            # Ambiguous attr, the admin must intervene to migrate it.
            self.plan.append(SchemaAttributeAmbiguous(attr, overlaps))

    # Examine schema classes
    for obj in self.olschema.classes:
        # If we have been instructed to ignore this oid, skip.
        if obj.oid in self._schema_oid_do_not_migrate:
            continue
        if obj.oid in self._schema_oid_unsupported:
            self.plan.append(SchemaClassUnsupported(obj))
            continue
        # For the attr, find if anything has a name overlap in any capacity.
        overlaps = [ds_obj for ds_obj in schema_objects if ds_obj.oid == obj.oid]
        if len(overlaps) == 0:
            # We need to add attr
            self.plan.append(SchemaClassCreate(obj))
        elif len(overlaps) == 1:
            # We need to possibly adjust the objectClass as it exists
            ds_obj = overlaps[0]
            if obj.inconsistent(ds_obj, resolver):
                self.plan.append(SchemaClassInconsistent(obj, ds_obj))
        else:
            # This should be an impossible state.
            raise Exception('impossible state')
def add_objectclass(inst, basedn, log, args):
    log = log.getChild('add_objectclass')
    schema = Schema(inst)
    parameters = _get_parameters(args, 'objectclasses')
    schema.add_objectclass(parameters)
    print("Successfully added the objectClass")
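# The dsconf-style helpers in this section (add_objectclass, edit_attributetype, ...)
# call a shared "_get_parameters" function that is defined elsewhere in the module.
# A simplified, hypothetical sketch of the idea (not the real implementation):
# translate parsed argparse values into the parameter dict that lib389's Schema
# add/edit methods expect.
def _get_parameters_sketch(args, schema_type):
    if schema_type not in ('attributetypes', 'objectclasses'):
        raise ValueError("Unknown schema element type: %s" % schema_type)
    parameters = {
        'names': [args.name],
        'oid': args.oid,
        'desc': args.desc,
        'x_origin': args.x_origin,
    }
    if schema_type == 'objectclasses':
        parameters.update({'must': args.must, 'may': args.may, 'sup': args.sup})
    return parameters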
def add_attr(topology_m2, attr_name):
    """Adds attribute to the schema"""

    ATTR_VALUE = """(NAME '%s' \
               DESC 'Attribute filteri-Multi-Valued' \
               SYNTAX 1.3.6.1.4.1.1466.115.121.1.27)""" % attr_name
    schema = Schema(topology_m2.ms["supplier1"])
    schema.add('attributeTypes', ATTR_VALUE)
def list_attributetypes(inst, basedn, log, args):
    log = log.getChild('list_attributetypes')
    schema = Schema(inst)
    if args is not None and args.json:
        print(dump_json(schema.get_attributetypes(json=True), indent=4))
    else:
        for attributetype in schema.get_attributetypes():
            print(attributetype)
def list_objectclasses(inst, basedn, log, args):
    log = log.getChild('list_objectclasses')
    schema = Schema(inst)
    if args is not None and args.json:
        print(dump_json(schema.get_objectclasses(json=True), indent=4))
    else:
        for oc in schema.get_objectclasses():
            print(oc)
def list_matchingrules(inst, basedn, log, args):
    log = log.getChild('list_matchingrules')
    schema = Schema(inst)
    if args is not None and args.json:
        print(dump_json(schema.get_matchingrules(json=True), indent=4))
    else:
        for mr in schema.get_matchingrules():
            print(mr)
def get_syntaxes(inst, basedn, log, args):
    log = log.getChild('get_syntaxes')
    schema = Schema(inst)
    result = schema.get_attr_syntaxes(json=args.json)
    if args.json:
        print(dump_json(result, indent=4))
    else:
        for id, name in result.items():
            print("%s (%s)" % (name, id))
def reload_schema(inst, basedn, log, args):
    schema = Schema(inst)
    log.info('Attempting to add task entry... This will fail if Schema Reload plug-in is not enabled.')
    task = schema.reload(args.schemadir)
    log.info('Successfully added task entry ' + task.dn)
    log.info("To verify that the schema reload operation was successful, please check the error logs.")
def validate_syntax(inst, basedn, log, args):
    schema = Schema(inst)
    log.info('Attempting to add task entry...')
    validate_task = schema.validate_syntax(args.DN, args.filter)
    validate_task.wait()
    exitcode = validate_task.get_exit_code()
    if exitcode != 0:
        log.error(f'Validate syntax task for {args.DN} has failed. Please check the logs')
    else:
        log.info('Successfully added task entry')
def edit_attributetype(inst, basedn, log, args):
    log = log.getChild('edit_attributetype')
    schema = Schema(inst)
    parameters = _get_parameters(args, 'attributetypes')
    aliases = parameters.pop("aliases", None)
    if aliases is not None and aliases != [""]:
        parameters["names"].extend(aliases)
    schema.edit_attributetype(args.name, parameters)
    print("Successfully changed the attributetype")
def query_matchingrule(inst, basedn, log, args):
    log = log.getChild('query_matchingrule')
    schema = Schema(inst)
    # Need the query type
    attr = _get_arg(args.name, msg="Enter matching rule to query")
    result = schema.query_matchingrule(attr, json=args.json)
    if args.json:
        print(dump_json(result, indent=4))
    else:
        print(result)
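# "_get_arg" is another shared CLI helper not shown here. A minimal, hypothetical
# sketch of the behaviour the query_* helpers in this section rely on: use the value
# supplied on the command line if present, otherwise prompt for it interactively.
def _get_arg_sketch(value, msg=None):
    if value is not None:
        return value
    return input("%s : " % msg).strip()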
def query_objectclass(inst, basedn, log, args):
    log = log.getChild('query_objectclass')
    schema = Schema(inst)
    # Need the query type
    oc = _get_arg(args.name, msg="Enter objectclass to query")
    result = schema.query_objectclass(oc, json=args.json)
    if args.json:
        print(dump_json(result, indent=4))
    else:
        print(result)
def test_valid_schema(topo):
    """Test schema-reload task with valid schema

    :id: 2ab304c0-3e58-4d34-b23b-a14b5997c7a8
    :setup: Standalone instance
    :steps:
        1. Create schema file with valid schema
        2. Run the schema reload task
        3. Run ldapsearch and check if schema was added
    :expectedresults:
        1. File creation should work
        2. The schema reload task should be successful
        3. Searching the server should return the new schema
    """

    log.info("Test schema-reload task with valid schema")

    # Step 1 - Create schema file
    log.info("Create valid schema file (99user.ldif)...")
    schema_filename = (topo.standalone.schemadir + "/99user.ldif")
    try:
        with open(schema_filename, 'w') as schema_file:
            schema_file.write("dn: cn=schema\n")
            schema_file.write("attributetypes: ( 8.9.10.11.12.13.13 NAME " +
                              "'ValidAttribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15" +
                              " X-ORIGIN 'Mozilla Dummy Schema' )\n")
            schema_file.write("objectclasses: ( 1.2.3.4.5.6.7.8 NAME 'TestObject' " +
                              "SUP top MUST ( objectclass $ cn ) MAY ( givenName $ " +
                              "sn $ ValidAttribute ) X-ORIGIN 'user defined' )\n")
        os.chmod(schema_filename, 0o777)
    except OSError as e:
        log.fatal("Failed to create schema file: " +
                  "{} Error: {}".format(schema_filename, str(e)))

    # Step 2 - Run the schema-reload task
    log.info("Run the schema-reload task...")
    schema = Schema(topo.standalone)
    task = schema.reload(schema_dir=topo.standalone.schemadir)
    task.wait()
    assert task.get_exit_code() == 0, "The schema reload task failed"

    # Step 3 - Verify valid schema was added to the server
    log.info("Check cn=schema to verify the valid schema was added")
    subschema = topo.standalone.schema.get_subschema()

    oc_obj = subschema.get_obj(ldap.schema.ObjectClass, 'TestObject')
    assert oc_obj is not None, "The new objectclass was not found on server"

    at_obj = subschema.get_obj(ldap.schema.AttributeType, 'ValidAttribute')
    assert at_obj is not None, "The new attribute was not found on server"
def reload_schema(inst, basedn, log, args):
    log = log.getChild('reload_schema')
    schema = Schema(inst)
    print('Attempting to add task entry... This will fail if Schema Reload plug-in is not enabled.')
    task = schema.reload(args.schemadir)
    if args.wait:
        task.wait()
        rc = task.get_exit_code()
        if rc == 0:
            print("Schema reload task ({}) successfully finished.".format(task.dn))
        else:
            raise ValueError("Schema reload task failed, please check the errors log for more information")
    else:
        print('Successfully added task entry {}'.format(task.dn))
        print("To verify that the schema reload operation was successful, please check the error logs.")
def test_schema_reload(topo):
    """Run a schema reload task

    :id: 995acc60-243b-45b0-9c1c-12bbe6a2882e
    :setup: Standalone Instance
    :steps:
        1. Add schema reload task
        2. Schema reload task succeeds
    :expectedresults:
        1. Success
        2. Success
    """

    schema = Schema(topo.standalone)
    task = schema.reload()
    assert task.exists()
    task.wait()
    assert task.get_exit_code() == 0
def query_attributetype(inst, basedn, log, args):
    log = log.getChild('query_attributetype')
    schema = Schema(inst)
    # Need the query type
    attr = _get_arg(args.name, msg="Enter attribute to query")
    if args.json:
        print(dump_json(schema.query_attributetype(attr, json=args.json), indent=4))
    else:
        attributetype, must, may = schema.query_attributetype(attr, json=args.json)
        print(attributetype)
        print("")
        print("MUST")
        for oc in must:
            print(oc)
        print("")
        print("MAY")
        for oc in may:
            print(oc)
def test_origins_with_extra_parenthesis(topo):
    """Test that custom schema with extra parenthesis in X-ORIGIN can be
    parsed into JSON

    :id: 4230f83b-0dc3-4bc4-a7a8-5ab0826a4f05
    :setup: Standalone Instance
    :steps:
        1. Add attribute with X-ORIGIN that contains extra parenthesis
        2. Query for that attribute with the JSON flag
    :expectedresults:
        1. Success
        2. Success
    """

    ATTR_NAME = 'testAttribute'
    X_ORG_VAL = 'test (TEST)'
    schema = Schema(topo.standalone)

    # Add new attribute
    parameters = {
        'names': [ATTR_NAME],
        'oid': '1.1.1.1.1.1.1.22222',
        'desc': 'Test extra parenthesis in X-ORIGIN',
        'x_origin': X_ORG_VAL,
        'syntax': '1.3.6.1.4.1.1466.115.121.1.15',
        'syntax_len': None,
        'x_ordered': None,
        'collective': None,
        'obsolete': None,
        'single_value': None,
        'no_user_mod': None,
        'equality': None,
        'substr': None,
        'ordering': None,
        'usage': None,
        'sup': None
    }
    schema.add_attributetype(parameters)

    # Search for the attribute with the JSON option
    attr_result = schema.query_attributetype(ATTR_NAME, json=True)

    # Verify the x-origin value is correct
    assert attr_result['at']['x_origin'][0] == X_ORG_VAL
def test_add_attribute_types(topology_st):
    """Test add attribute types to schema

    :id: 84d6dece-8cfc-11e9-89a3-8c16451d917b
    :setup: Standalone
    :steps:
        1. Add new attribute types to schema.
    :expectedresults:
        1. Pass
    """
    for attribute in ATTR:
        Schema(topology_st.standalone).add('attributetypes', attribute)
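# "ATTR" is a module-level list of attribute type definitions defined elsewhere in
# the test module and not shown in this section. A hypothetical illustration of the
# kind of values it holds (fake OID, not the actual test data):
ATTR_EXAMPLE = [
    "( 1.2.3.4.5.6.7.999 NAME 'entrytestattr' DESC 'Example test attribute' "
    "SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN 'user defined' )",
]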
def test_schema_reload_with_searches(topo):
    """Test that during the schema reload task there is a small window where the
    new schema is not loaded into the asi hashtables - this results in searches
    not returning entries.

    :id: 375f1fdc-a9ef-45de-984d-0b79a40ff219
    :setup: Standalone instance
    :steps:
        1. Create a test user
        2. Run a schema_reload task while searching for our user
        3. While we wait for the task to complete, search for our user
        4. Check the user is still being returned and if the task is complete
    :expectedresults:
        1. Operation should be successful
        2. Operation should be successful
        3. Operation should be successful
        4. Operation should be successful
    """

    log.info('Test that searches still work as expected during schema reload tasks')

    # Add a user
    users = UserAccounts(topo.standalone, DEFAULT_SUFFIX)
    user = users.create_test_user(uid=1)

    # Run a schema_reload task while searching for our user. Since
    # this is a race condition, run it several times.
    schema = Schema(topo.standalone)
    task = schema.reload(schema_dir=topo.standalone.schemadir)

    # While we wait for the task to complete, search for our user
    search_count = 0
    while search_count < 10:
        # Now check the user is still being returned
        # Check if the task is complete
        assert user.exists()
        if task.get_exit_code() == 0:
            break
        time.sleep(1)
        search_count += 1
def test_invalid_uidnumber(topo, validate_syntax_off):
    """Test syntax-validate task with invalid uidNumber attribute value

    :id: 30fdcae6-ffa6-4ec4-8da9-6fb138fc1828
    :setup: Standalone instance
    :steps:
        1. Set nsslapd-syntaxcheck to off
        2. Clean error log
        3. Add a user with uidNumber attribute set to an invalid value (string)
        4. Run syntax validate task
        5. Assert that there is a corresponding error in the error log
        6. Set nsslapd-syntaxcheck to on
    :expectedresults:
        1. It should succeed
        2. It should succeed
        3. It should succeed
        4. It should succeed
        5. It should succeed
        6. It should succeed
    """
    inst = topo.standalone

    log.info('Clean the error log')
    inst.deleteErrorLogs()

    users = UserAccounts(inst, DEFAULT_SUFFIX)
    users.create_test_user(uid="invalid_value")

    schema = Schema(inst)
    log.info('Attempting to add task entry...')
    validate_task = schema.validate_syntax(DEFAULT_SUFFIX)
    validate_task.wait()
    exitcode = validate_task.get_exit_code()
    assert exitcode == 0

    error_lines = inst.ds_error_log.match('.*uidNumber: value #0 invalid per syntax.*')
    assert len(error_lines) == 1
    log.info('Found an invalid entry with wrong uidNumber - Success')
def test_x_origin(topo):
    """Test that the various forms of X-ORIGIN are handled correctly

    :id: 995acc60-243b-45b0-9c1c-12bbe6a2882d
    :setup: Standalone Instance
    :steps:
        1. Create schema file with specific/unique formats for X-ORIGIN
        2. Reload the schema file (schema reload task)
        3. List all attributes without error
        4. Confirm the expected results
    :expectedresults:
        1. Success
        2. Success
        3. Success
        4. Success
    """

    # Create a custom schema file so we can test specific formats
    schema_file_name = topo.standalone.schemadir + '/98test.ldif'
    schema_file = open(schema_file_name, "w")
    testschema = (
        "dn: cn=schema\n" +
        "attributetypes: ( 8.9.10.11.12.13.16 NAME 'testattr' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN 'USER_DEFINED' )\n" +
        "attributetypes: ( 8.9.10.11.12.13.17 NAME 'testattrtwo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'USER_DEFINED' 'should be not ignored!!' ) )\n"
    )
    schema_file.write(testschema)
    schema_file.close()

    # Reload the schema
    myschema = Schema(topo.standalone)
    task = myschema.reload()
    assert task.exists()
    task.wait()
    assert task.get_exit_code() == 0

    # Now search attrs and make sure there are no errors
    myschema.get_attributetypes()
    myschema.get_objectclasses()

    # Check we have X-ORIGIN as expected
    assert " 'USER_DEFINED' " in str(myschema.query_attributetype("testattr"))
    assert " 'USER_DEFINED' " in str(myschema.query_attributetype("testattrtwo"))
def _create_entries(topology_st):
    """Add attribute types to the schema and create filter entries
    (entries with extensibleObject).
    """
    for attribute in ATTR:
        Schema(topology_st.standalone).add('attributetypes', attribute)

    cos = CosTemplates(topology_st.standalone, DEFAULT_SUFFIX)
    # Entry with extensibleObject
    for attr, value in LIST_ATTR_TO_CREATE:
        cos.create(properties={
            'cn': attr,
            'attr' + attr.split('entry')[1][:-1]: value
        })
def test_matching_rules(topology_st):
    """Test matching rules.

    :id: 8cb6e62a-8cfc-11e9-be9a-8c16451d917b
    :setup: Standalone
    :steps:
        1. Search for matching rules.
        2. The matching rules should be present in the schema.
    :expectedresults:
        1. Pass
        2. Pass
    """
    matchingrules = Schema(topology_st.standalone).get_matchingrules()
    assert matchingrules
    rules = set(matchingrule.names for matchingrule in matchingrules)
    rules1 = [role[0] for role in rules if len(role) != 0]
    for rule in TESTED_MATCHING_RULES:
        assert rule in rules1
def test_indexing_schema(topo, _create_entries):
    """Test indexing with a schema-defined integer attribute

    :id: 67a2179a-91ae-11e9-9a33-8c16451d917b
    :setup: Standalone
    :steps:
        1. Add an attribute type with integer syntax to the schema.
        2. Search with an ordering filter that uses the new attribute.
    :expectedresults:
        1. Pass
        2. Pass
    """
    cos = CosTemplates(topo.standalone, DEFAULT_SUFFIX, rdn='ou=People')
    Schema(topo.standalone).add('attributetypes',
                                "( 8.9.10.11.12.13.14.15 NAME 'myintattr' DESC 'for integer "
                                "syntax index ordering testing' EQUALITY integerMatch ORDERING "
                                "integerOrderingMatch SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 )")
    topo.standalone.restart()
    assert cos.filter("(myintattr>=-18446744073709551617)")
def schema_objectclass(ds, objectclass_name):
    if request.method == 'OPTIONS':
        #
        # Return a list of the allowed methods
        #
        msg = {'allow': 'GET PUT DELETE OPTIONS HEAD'}
        return jsonify(msg)

    try:
        objectclass = ds.schema.query_objectclass(objectclass_name)
    except ldap.LDAPError as e:
        return jsonResponse("Error getting objectclass: %s" % str(e), 403, request.url)

    # This does the right thing regardless of resource type
    if request.method == 'HEAD':
        #
        # Return the HEAD Response
        #
        if objectclass is None:
            return jsonResponse("", 404, request.url)
        return jsonResponse("", 200, request.url)

    if request.method == 'GET':
        if objectclass is None:
            return jsonResponse("Failed to find objectclass", 404, request.url)
        results = {
            'names': objectclass.names,
            'desc': objectclass.desc,
            'kind': objectclass.kind,
            'may': objectclass.may,
            'must': objectclass.must,
            'obsolete': objectclass.obsolete,
            'oid': objectclass.oid,
            'schema_attribute': objectclass.schema_attribute,
            'sup': objectclass.sup,
        }
        return jsonResponse(results, 200, request.url)

    if request.method == 'PUT':
        json_req = request.get_json(force=True)
        if not json_req:
            return jsonResponse("JSON representation missing", 403, request.url)
        newoc = jsonOCToStr(json_req)
        attrObj = Schema(ds)
        try:
            attrObj.add_objectclass(newoc)
        except ldap.TYPE_OR_VALUE_EXISTS:
            # idempotent - return the entry as is
            return jsonResponse(json_req, 201, request.url + '/' + json_req['names'][0])
        except ldap.LDAPError as e:
            return jsonResponse('Failed to add objectclass: %s' % str(e), 403, request.url)
        return jsonResponse(json_req, 201, request.url + '/' + json_req['names'][0])

    if request.method == 'DELETE':
        schemaObj = Schema(ds)
        try:
            delete_val = None
            name_cmp = "'%s'" % objectclass_name
            entry = schemaObj.get_entry()
            ocs = entry.getValues('objectClasses')
            for val in ocs:
                if name_cmp.lower() in val.lower():
                    delete_val = val
                    break
            if delete_val:
                ds.modify_s(DN_SCHEMA, [(ldap.MOD_DELETE, 'objectClasses', delete_val)])
                return jsonResponse('', 200, request.url)
            else:
                # idempotent, return success
                return jsonResponse('', 200, request.url)
        except ldap.LDAPError as e:
            return jsonResponse('Failed to delete objectclass: %s' % str(e), 403, request.url)
def apply(self, inst):
    schema = Schema(inst)
    inst.log.debug("SchemaClassCreate -> %s" % self.obj.schema_str())
    schema.add(self.obj.schema_attribute, self.obj.schema_str())
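# How the plan produced by _gen_schema_plan() is consumed is not shown in this
# section. A minimal, assumed sketch of a driver loop (hypothetical, not the actual
# migration tool): walk the plan and apply each item against the target instance.
def apply_schema_plan_sketch(plan, inst):
    for item in plan:
        inst.log.debug("applying plan item: %s" % item)
        item.apply(inst)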
def schema_attribute(ds, attribute_name):
    if request.method == 'OPTIONS':
        #
        # Return a list of the allowed methods
        #
        msg = {'allow': 'GET PUT DELETE OPTIONS HEAD'}
        return jsonify(msg)

    try:
        result = ds.schema.query_attributetype(attribute_name)
    except ldap.LDAPError as e:
        return jsonResponse("Error getting attribute: %s" % str(e), 403, request.url)

    if request.method == 'HEAD':
        #
        # Return the HEAD Response
        #
        if result is None:
            return jsonResponse("", 404, request.url)
        return jsonResponse("", 200, request.url)

    if request.method == 'GET':
        if result is None:
            return jsonResponse("Failed to find attribute", 404, request.url)
        # Split the tuple
        attribute, must, may = result
        results = {'names': attribute.names,
                   'desc': attribute.desc,
                   'oid': attribute.oid,
                   'ordering': attribute.ordering,
                   'single_value': attribute.single_value,
                   'equality': attribute.equality,
                   'substr': attribute.substr,
                   'syntax': attribute.syntax,
                   'usage': attribute.usage,
                   'obsolete': attribute.obsolete,
                   'no_user_mod': attribute.no_user_mod,
                   'sup': attribute.sup,
                   'must': [x.names[0] for x in must],
                   'may': [x.names[0] for x in may]}
        return jsonResponse(results, 200, request.url)

    if request.method == 'PUT':
        json_req = request.get_json(force=True)
        if not json_req:
            return jsonResponse("JSON representation missing", 403, request.url)
        newattr = jsonAttrToStr(json_req)
        attrObj = Schema(ds)
        try:
            attrObj.add_attribute(newattr)
        except ldap.TYPE_OR_VALUE_EXISTS:
            # idempotent - return the entry as is
            return jsonResponse(json_req, 201, request.url + '/' + json_req['names'][0])
        except ldap.LDAPError as e:
            return jsonResponse('Failed to add attribute: %s' % str(e), 403, request.url)
        return jsonResponse(json_req, 201, request.url + '/' + json_req['names'][0])

    if request.method == 'DELETE':
        schemaObj = Schema(ds)
        try:
            delete_val = None
            name_cmp = "'%s'" % attribute_name
            entry = schemaObj.get_entry()
            attributes = entry.getValues('attributeTypes')
            for val in attributes:
                if name_cmp.lower() in val.lower():
                    delete_val = val
                    break
            if delete_val:
                ds.modify_s(DN_SCHEMA, [(ldap.MOD_DELETE, 'attributeTypes', delete_val)])
                return jsonResponse('', 200, request.url)
            else:
                # Idempotent - no val, already deleted, return success
                return jsonResponse('', 200, request.url)
        except ldap.LDAPError as e:
            return jsonResponse('Failed to delete attribute: %s' % str(e), 403, request.url)
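# The Flask handlers above rely on a "jsonResponse" helper that is not part of this
# section. A rough, assumed sketch of the behaviour they expect (hypothetical name
# and shape, not the real helper): serialise a payload together with the request URL
# and return it with the given HTTP status code.
import json

from flask import Response


def json_response_sketch(msg, code, url):
    body = json.dumps({'msg': msg, 'href': url}, default=str)
    return Response(body, status=code, mimetype='application/json')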