def patch_one(self, zone_transfer_request_id):
    """Update a ZoneTransferRequest from a partial request body.

    Merges the supplied values over the existing resource, validates the
    merged document against the resource schema, persists via central, and
    returns the updated resource in APIv2 wire format.

    :param zone_transfer_request_id: ID of the request to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # Fetch the existing zone_transfer_request
    zt_request = self.central_api.get_zone_transfer_request(
        context, zone_transfer_request_id)

    # Convert to APIv2 Format
    zt_request_data = self._view.show(context, request, zt_request)

    if request.content_type == 'application/json-patch+json':
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        zt_request_data = utils.deep_dict_merge(zt_request_data, body)

    # Validate the request conforms to the schema
    self._resource_schema.validate(zt_request_data)
    zt_request.update(self._view.load(context, request, body))
    zt_request = self.central_api.update_zone_transfer_request(
        context, zt_request)
    response.status_int = 200

    return self._view.show(context, request, zt_request)
def patch_one(self, zone_id, recordset_id, record_id):
    """Update a Record from a partial request body.

    Merges the supplied values over the existing record, validates the
    merged document, persists via central, and returns the updated record
    in APIv2 wire format. Responds 202 while the record is PENDING,
    otherwise 200.

    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # TODO(kiall): Validate we have a sane UUID for zone_id and
    #              recordset_id

    # Fetch the existing record
    record = central_api.get_record(context, zone_id, recordset_id,
                                    record_id)

    # Convert to APIv2 Format
    record = self._view.show(context, request, record)

    if request.content_type == 'application/json-patch+json':
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        record = utils.deep_dict_merge(record, body)

    # Validate the request conforms to the schema
    self._resource_schema.validate(record)

    values = self._view.load(context, request, body)
    record = central_api.update_record(
        context, zone_id, recordset_id, record_id, values)

    if record['status'] == 'PENDING':
        response.status_int = 202
    else:
        response.status_int = 200

    return self._view.show(context, request, record)
def patch_one(self, zone_id, recordset_id, record_id):
    """Update a Record from a partial request body.

    Merges the supplied values over the existing record, validates the
    merged document, persists via central, and returns the updated record
    in APIv2 wire format. Responds 202 while the record is PENDING,
    otherwise 200.

    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # Fetch the existing record
    record = self.central_api.get_record(context, zone_id, recordset_id,
                                         record_id)

    # Convert to APIv2 Format
    record = self._view.show(context, request, record)

    if request.content_type == 'application/json-patch+json':
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        record = utils.deep_dict_merge(record, body)

    # Validate the request conforms to the schema
    self._resource_schema.validate(record)

    values = self._view.load(context, request, body)
    record = self.central_api.update_record(context, zone_id,
                                            recordset_id, record_id,
                                            values)

    if record['status'] == 'PENDING':
        response.status_int = 202
    else:
        response.status_int = 200

    return self._view.show(context, request, record)
def test_deep_dict_merge(self):
    """Merging two dicts recurses into nested dicts and lets the second
    argument win on scalar key collisions ('c'), while disjoint keys from
    both sides are preserved."""
    base = {
        'a': {'dns': 'record'},
        'b': 'b',
        'c': 'c',
    }
    overlay = {
        'a': {'domain': 'zone'},
        'c': 1,
        'd': 'd',
    }

    expected = {
        'a': {'dns': 'record', 'domain': 'zone'},
        'b': 'b',
        'c': 1,
        'd': 'd',
    }

    merged = utils.deep_dict_merge(base, overlay)
    self.assertEqual(expected, merged)
def patch_one(self, pool_id):
    """Update the specified pool from a partial request body.

    Merges the supplied values over the existing pool, validates the
    merged document, persists via central, and returns the updated pool
    in APIv2 wire format.

    :param pool_id: ID of the pool to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # Fetch the existing pool
    pool = self.central_api.get_pool(context, pool_id)

    # Convert to APIv2 Format
    pool_data = self._view.show(context, request, pool)

    if request.content_type == 'application/json-patch+json':
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        pool_data = utils.deep_dict_merge(pool_data, body)

    # Validate the new set of data
    self._resource_schema.validate(pool_data)

    # Update and persist the resource
    pool.update(self._view.load(context, request, body))
    pool = self.central_api.update_pool(context, pool)

    response.status_int = 200

    return self._view.show(context, request, pool)
def patch_one(self, blacklist_id):
    """Update a blacklisted zone entry from a partial request body.

    Merges the supplied values over the existing entry, validates the
    merged document, persists via central, and returns the updated entry
    in APIv2 wire format.

    :param blacklist_id: ID of the blacklist entry to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # Fetch the existing blacklist entry
    blacklist = self.central_api.get_blacklist(context, blacklist_id)

    # Convert to APIv2 Format
    blacklist_data = self._view.show(context, request, blacklist)

    if request.content_type == 'application/json-patch+json':
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        blacklist_data = utils.deep_dict_merge(blacklist_data, body)

    # Validate the new set of data
    self._resource_schema.validate(blacklist_data)

    # Update and persist the resource
    blacklist.update(self._view.load(context, request, body))
    blacklist = self.central_api.update_blacklist(context, blacklist)

    response.status_int = 200

    return self._view.show(context, request, blacklist)
def patch_one(self, tsigkey_id):
    """Update a TsigKey from a partial request body.

    Merges the supplied values over the existing key, validates the
    merged document, persists via central, and returns the updated key
    in APIv2 wire format.

    :param tsigkey_id: ID of the TSIG key to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # Fetch the existing tsigkey entry
    tsigkey = self.central_api.get_tsigkey(context, tsigkey_id)

    # Convert to APIv2 Format
    tsigkey_data = self._view.show(context, request, tsigkey)

    if request.content_type == 'application/json-patch+json':
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        tsigkey_data = utils.deep_dict_merge(tsigkey_data, body)

    # Validate the new set of data
    self._resource_schema.validate(tsigkey_data)

    # Update and persist the resource
    tsigkey.update(self._view.load(context, request, body))
    tsigkey = self.central_api.update_tsigkey(context, tsigkey)

    response.status_int = 200

    return self._view.show(context, request, tsigkey)
def patch_one(self, zone_id, recordset_id):
    """Update a RecordSet from a partial request body.

    Merges the supplied values over the existing recordset, validates
    the merged document, persists via central, and returns the updated
    recordset in APIv2 wire format.

    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # TODO(kiall): Validate we have a sane UUID for zone_id and
    #              recordset_id

    # Fetch the existing recordset
    recordset = central_api.get_recordset(context, zone_id, recordset_id)

    # Convert to APIv2 Format
    recordset = self._view.show(context, request, recordset)

    if request.content_type == 'application/json-patch+json':
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        recordset = utils.deep_dict_merge(recordset, body)

    # Validate the request conforms to the schema
    self._resource_schema.validate(recordset)

    values = self._view.load(context, request, body)
    recordset = central_api.update_recordset(context, zone_id,
                                             recordset_id, values)

    response.status_int = 200

    return self._view.show(context, request, recordset)
def patch_one(self, blacklist_id):
    """Update a blacklisted zone from a partial request body.

    Merges the supplied values over the existing entry, validates the
    merged document, persists via central, and returns the updated entry
    in APIv2 wire format.

    :param blacklist_id: ID of the blacklist entry to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ["context"]
    body = request.body_dict
    response = pecan.response

    # Fetch the existing blacklisted zone
    blacklist = self.central_api.get_blacklist(context, blacklist_id)

    # Convert to APIv2 Format
    blacklist = self._view.show(context, request, blacklist)

    if request.content_type == "application/json-patch+json":
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError("json-patch not implemented")
    else:
        blacklist = utils.deep_dict_merge(blacklist, body)

    # Validate the request conforms to the schema
    self._resource_schema.validate(blacklist)

    values = self._view.load(context, request, body)
    blacklist = self.central_api.update_blacklist(context, blacklist_id,
                                                  values)

    response.status_int = 200

    return self._view.show(context, request, blacklist)
def patch_one(self, tld_id):
    """Update a Tld from a partial request body.

    Merges the supplied values over the existing TLD, validates the
    merged document, persists via central, and returns the updated TLD
    in APIv2 wire format.

    :param tld_id: ID of the TLD to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # Fetch the existing tld
    tld = central_api.get_tld(context, tld_id)

    # Convert to APIv2 Format
    tld = self._view.show(context, request, tld)

    if request.content_type == 'application/json-patch+json':
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        tld = utils.deep_dict_merge(tld, body)

    # Validate the request conforms to the schema
    self._resource_schema.validate(tld)

    values = self._view.load(context, request, body)
    tld = central_api.update_tld(context, tld_id, values)

    response.status_int = 200

    return self._view.show(context, request, tld)
def patch_one(self, zone_id):
    """Update a Zone from a partial request body.

    Merges the supplied values over the existing zone, validates the
    merged document, persists via central, and returns the updated zone
    in APIv2 wire format. Responds 202 while the zone is PENDING,
    otherwise 200.

    :param zone_id: ID of the zone (domain) to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    # TODO(kiall): This needs cleanup to say the least..
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # TODO(kiall): Validate we have a sane UUID for zone_id

    # Fetch the existing zone
    zone = self.central_api.get_domain(context, zone_id)

    # Convert to APIv2 Format
    zone_data = self._view.show(context, request, zone)

    if request.content_type == 'application/json-patch+json':
        # Possible pattern:
        #
        # 1) Load existing zone.
        # 2) Apply patch, maintain list of changes.
        # 3) Return changes, after passing through the code ^ for plain
        #    JSON.
        #
        # Difficulties:
        #
        # 1) "Nested" resources? records inside a recordset.
        # 2) What to do when a zone doesn't exist in the first place?
        # 3) ...?
        #
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        zone_data = utils.deep_dict_merge(zone_data, body)

    # Validate the new set of data
    self._resource_schema.validate(zone_data)

    # Update and persist the resource
    zone.update(self._view.load(context, request, body))
    zone = self.central_api.update_domain(context, zone)

    if zone.status == 'PENDING':
        response.status_int = 202
    else:
        response.status_int = 200

    return self._view.show(context, request, zone)
def patch_one(self, zone_id):
    """Update a Zone from a partial request body.

    Merges the supplied values over the existing zone, validates the
    merged document, persists via central, and returns the updated zone
    in APIv2 wire format. Responds 202 while the zone is PENDING,
    otherwise 200.

    :param zone_id: ID of the zone (domain) to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    """
    # TODO(kiall): This needs cleanup to say the least..
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # TODO(kiall): Validate we have a sane UUID for zone_id

    # Fetch the existing zone
    zone = central_api.get_domain(context, zone_id)

    # Convert to APIv2 Format
    zone = self._view.show(context, request, zone)

    if request.content_type == 'application/json-patch+json':
        # Possible pattern:
        #
        # 1) Load existing zone.
        # 2) Apply patch, maintain list of changes.
        # 3) Return changes, after passing through the code ^ for plain
        #    JSON.
        #
        # Difficulties:
        #
        # 1) "Nested" resources? records inside a recordset.
        # 2) What to do when a zone doesn't exist in the first place?
        # 3) ...?
        #
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        zone = utils.deep_dict_merge(zone, body)

    # Validate the request conforms to the schema
    self._resource_schema.validate(zone)

    values = self._view.load(context, request, body)
    zone = central_api.update_domain(context, zone_id, values)

    if zone['status'] == 'PENDING':
        response.status_int = 202
    else:
        response.status_int = 200

    return self._view.show(context, request, zone)
def put_one(self, zone_id, recordset_id):
    """Replace a RecordSet with the supplied representation.

    SOA recordsets and NS recordsets at the zone apex are read-only and
    are rejected with BadRequest. The body is merged over the existing
    recordset for schema validation, then persisted via central.

    :raises exceptions.BadRequest: for SOA or root-zone NS recordsets.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # Fetch the existing recordset
    recordset = self.central_api.get_recordset(context, zone_id,
                                               recordset_id)

    # SOA recordsets cannot be updated manually
    if recordset['type'] == 'SOA':
        # BUG FIX: message previously read "is now allowed", asserting
        # the opposite of the behavior; the sibling implementation uses
        # "is not allowed".
        raise exceptions.BadRequest(
            'Updating SOA recordsets is not allowed')

    # NS recordsets at the zone root cannot be manually updated
    if recordset['type'] == 'NS':
        zone = self.central_api.get_domain(context, zone_id)
        if recordset['name'] == zone['name']:
            raise exceptions.BadRequest(
                'Updating a root zone NS record is not allowed')

    # Convert to APIv2 Format
    recordset_data = self._view.show(context, request, recordset)
    recordset_data = utils.deep_dict_merge(recordset_data, body)

    # Validate the new set of data
    self._resource_schema.validate(recordset_data)

    # Update and persist the resource
    recordset.update(self._view.load(context, request, body))
    recordset = self.central_api.update_recordset(context, recordset)

    response.status_int = 200

    return self._view.show(context, request, recordset)
def validate(self):
    """Validate this RecordSet and each of its records.

    Each record's data is parsed by the type-specific Record class
    (e.g. ``A`` for type 'A'); parse failures are collected as faked
    jsonschema ``ValidationError`` objects. The type class may also
    override ``self.FIELDS`` for the duration of the JSONSchema run;
    the original FIELDS and the original Record objects are always
    restored in the ``finally`` block.

    :raises exceptions.InvalidObject: when the type is unknown or any
        record fails parsing or schema validation.
    """
    LOG.debug("Validating '%(name)s' object with values: %(values)r", {
        'name': self.obj_name(),
        'values': self.to_dict(),
    })
    LOG.debug(list(self.records))
    errors = ValidationErrorList()

    # Get the right classes (e.g. A for Recordsets with type: 'A')
    try:
        record_list_cls = self.obj_cls_from_name('%sList' % self.type)
        record_cls = self.obj_cls_from_name(self.type)
    except (KeyError, ovo_exc.UnsupportedObjectError):
        # NOTE: the caught exception was bound as 'e' but never used.
        err_msg = ("'%(type)s' is not a valid record type"
                   % {'type': self.type})
        self._validate_fail(errors, err_msg)

    if self.type not in cfg.CONF.supported_record_type:
        err_msg = ("'%(type)s' is not a supported record type"
                   % {'type': self.type})
        self._validate_fail(errors, err_msg)

    # Get any rules that the record type imposes on the record
    changes = record_cls.get_recordset_schema_changes()
    old_fields = {}
    if changes:
        LOG.debug("Record %s is overriding the RecordSet schema with: %s",
                  record_cls.obj_name(), changes)
        old_fields = deepcopy(self.FIELDS)
        self.FIELDS = utils.deep_dict_merge(self.FIELDS, changes)

    error_indexes = []
    # Copy these for safekeeping
    old_records = deepcopy(self.records)
    # Blank the records for this object with the right list type
    self.records = record_list_cls()

    i = 0
    for record in old_records:
        record_obj = record_cls()
        try:
            # The _from_string() method will throw a ValueError if there
            # is not enough data blobs
            record_obj._from_string(record.data)
        except (ValueError, TypeError, AttributeError):
            # Something broke in the _from_string() method
            # Fake a correct looking ValidationError() object
            e = ValidationError()
            e.path = ['records', i]
            e.validator = 'format'
            e.validator_value = [self.type]
            e.message = ("'%(data)s' is not a '%(type)s' Record"
                         % {'data': record.data, 'type': self.type})
            # Add it to the list for later
            errors.append(e)
            error_indexes.append(i)
        except Exception as e:
            # BUG FIX: this previously wrapped an already-%-formatted
            # string in an unbound str.format() call, which is a no-op;
            # use plain string formatting (matching the sibling copy).
            error_message = ('Provided object is not valid. '
                             'Got a %s error with message %s'
                             % (type(e).__name__, six.text_type(e)))
            raise exceptions.InvalidObject(error_message)
        else:
            # Seems to have loaded right - add it to be validated by
            # JSONSchema
            self.records.append(record_obj)
        i += 1

    try:
        # Run the actual validate code
        super(RecordSet, self).validate()
    except exceptions.InvalidObject as e:
        raise e
    else:
        # If JSONSchema passes, but we found parsing errors,
        # raise an exception
        if len(errors) > 0:
            LOG.debug(
                "Error Validating '%(name)s' object with values: "
                "%(values)r", {
                    'name': self.obj_name(),
                    'values': self.to_dict(),
                }
            )
            raise exceptions.InvalidObject(
                "Provided object does not match "
                "schema", errors=errors, object=self)
    finally:
        if old_fields:
            # Restore the schema the record type temporarily overrode
            self.FIELDS = old_fields
        # Send in the traditional Record objects to central / storage
        self.records = old_records
def validate(self):
    """Validate this RecordSet and each of its records.

    Each record's data is parsed by the type-specific Record class
    (e.g. ``A`` for type 'A'); parse failures are collected as faked
    jsonschema ``ValidationError`` objects and re-indexed against the
    JSONSchema errors. The type class may also override ``self.FIELDS``
    for the duration of the JSONSchema run; the original FIELDS and the
    original Record objects are always restored in ``finally``.

    :raises exceptions.InvalidObject: when the type is unknown or any
        record fails parsing or schema validation.
    """
    LOG.debug("Validating '%(name)s' object with values: %(values)r", {
        'name': self.obj_name(),
        'values': self.to_dict(),
    })
    errors = ValidationErrorList()

    # Get the right classes (e.g. A for Recordsets with type: 'A')
    try:
        record_list_cls = self.obj_cls_from_name('%sList' % self.type)
        record_cls = self.obj_cls_from_name(self.type)
    except KeyError:
        # Unknown record type - fake a ValidationError and bail out
        e = ValidationError()
        e.path = ['recordset', 'type']
        e.validator = 'value'
        e.validator_value = [self.type]
        e.message = ("'%(type)s' is not a supported Record type"
                     % {'type': self.type})
        # Add it to the list for later
        errors.append(e)
        raise exceptions.InvalidObject(
            "Provided object does not match "
            "schema", errors=errors, object=self)

    # Get any rules that the record type imposes on the record
    changes = record_cls.get_recordset_schema_changes()
    old_fields = {}
    if changes:
        # BUG FIX: arguments are now passed lazily to LOG.debug instead
        # of eagerly %-formatting the message string.
        LOG.debug("Record %s is overriding the RecordSet schema with: %s",
                  record_cls.obj_name(), changes)
        old_fields = deepcopy(self.FIELDS)
        self.FIELDS = utils.deep_dict_merge(self.FIELDS, changes)

    error_indexes = []
    # Copy these for safekeeping
    old_records = deepcopy(self.records)
    # Blank the records for this object with the right list type
    self.records = record_list_cls()

    i = 0
    for record in old_records:
        record_obj = record_cls()
        try:
            # The _from_string() method will throw a ValueError if there
            # is not enough data blobs
            record_obj._from_string(record.data)
        except (ValueError, TypeError, AttributeError):
            # Something broke in the _from_string() method
            # Fake a correct looking ValidationError() object
            e = ValidationError()
            e.path = ['records', i]
            e.validator = 'format'
            e.validator_value = [self.type]
            e.message = ("'%(data)s' is not a '%(type)s' Record"
                         % {'data': record.data, 'type': self.type})
            # Add it to the list for later
            errors.append(e)
            error_indexes.append(i)
        except Exception as e:
            # BUG FIX: this previously wrapped an already-%-formatted
            # string in an unbound str.format() call, which is a no-op;
            # use plain string formatting.
            error_message = ('Provided object is not valid. '
                             'Got a %s error with message %s'
                             % (type(e).__name__, six.text_type(e)))
            raise exceptions.InvalidObject(error_message)
        else:
            # Seems to have loaded right - add it to be validated by
            # JSONSchema
            self.records.append(record_obj)
        i += 1

    try:
        # Run the actual validate code
        super(RecordSet, self).validate()
    except exceptions.InvalidObject as e:
        # Something is wrong according to JSONSchema - append our errors
        increment = 0
        # This code below is to make sure we have the index for the record
        # list correct. JSONSchema may be missing some of the objects due
        # to validation above, so this re - inserts them, and makes sure
        # the index is right
        for error in e.errors:
            if len(error.path) > 1 and isinstance(error.path[1], int):
                error.path[1] += increment
                while error.path[1] in error_indexes:
                    increment += 1
                    error.path[1] += 1
        # Add the list from above
        e.errors.extend(errors)
        # Raise the exception
        raise e
    else:
        # If JSONSchema passes, but we found parsing errors,
        # raise an exception
        if len(errors) > 0:
            LOG.debug(
                "Error Validating '%(name)s' object with values: "
                "%(values)r", {
                    'name': self.obj_name(),
                    'values': self.to_dict(),
                }
            )
            raise exceptions.InvalidObject(
                "Provided object does not match "
                "schema", errors=errors, object=self)
    finally:
        if old_fields:
            # Restore the schema the record type temporarily overrode
            self.FIELDS = old_fields
        # Send in the traditional Record objects to central / storage
        self.records = old_records
def test_deep_dict_merge_not_dict(self):
    """When the second argument is not a dict, deep_dict_merge returns
    it unchanged rather than attempting a merge."""
    merged = utils.deep_dict_merge({}, [])
    self.assertIsInstance(merged, list)
def put_one(self, zone_id, recordset_id):
    """Replace a RecordSet, diffing its records against the request body.

    SOA recordsets and NS recordsets at the zone apex are read-only and
    rejected with BadRequest. When the body contains a 'records' array,
    records absent from it are removed and new ones appended; without a
    'records' key the existing records are left untouched. Responds 202
    while the recordset is PENDING, otherwise 200.

    :raises exceptions.BadRequest: for SOA or root-zone NS recordsets.
    """
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # Fetch the existing recordset
    recordset = self.central_api.get_recordset(context, zone_id,
                                               recordset_id)

    # SOA recordsets cannot be updated manually
    if recordset['type'] == 'SOA':
        raise exceptions.BadRequest(
            'Updating SOA recordsets is not allowed')

    # NS recordsets at the zone root cannot be manually updated
    if recordset['type'] == 'NS':
        zone = self.central_api.get_domain(context, zone_id)
        if recordset['name'] == zone['name']:
            raise exceptions.BadRequest(
                'Updating a root zone NS record is not allowed')

    # Convert to APIv2 Format, merging the body over the existing data
    # purely for schema validation purposes
    recordset_data = self._view.show(context, request, recordset)
    recordset_data = utils.deep_dict_merge(recordset_data, body)

    new_recordset = self._view.load(context, request, body)

    # Validate the new set of data
    self._resource_schema.validate(recordset_data)

    # Get original list of Records (keyed by their data strings)
    original_records = set()
    for record in recordset.records:
        original_records.add(record.data)
    # Get new list of Records
    new_records = set()
    if 'records' in new_recordset:
        for record in new_recordset['records']:
            new_records.add(record.data)
    # Get differences of Records
    records_to_add = new_records.difference(original_records)
    records_to_rm = original_records.difference(new_records)

    # Update all items except records; remember whether a records array
    # was supplied at all, since its absence means "leave records alone"
    record_update = False
    if 'records' in new_recordset:
        record_update = True
        del new_recordset['records']
    recordset.update(new_recordset)

    # Remove deleted records if we have provided a records array
    if record_update:
        recordset.records[:] = [
            record for record in recordset.records
            if record.data not in records_to_rm
        ]

    # Add new records
    for record in records_to_add:
        recordset.records.append(Record(data=record))

    # Persist the resource
    recordset = self.central_api.update_recordset(context, recordset)

    if recordset['status'] == 'PENDING':
        response.status_int = 202
    else:
        response.status_int = 200

    return self._view.show(context, request, recordset)
def validate(self):
    """Validate this RecordSet and each of its records.

    Each record's data is parsed by the type-specific Record class
    (e.g. ``A`` for type 'A'); parse failures are collected as faked
    jsonschema ``ValidationError`` objects and re-indexed against the
    JSONSchema errors. The type class may also override ``self.FIELDS``
    for the duration of the JSONSchema run; the original FIELDS and the
    original Record objects are always restored in ``finally``.

    :raises exceptions.InvalidObject: when the type is unknown or any
        record fails parsing or schema validation.
    """
    LOG.debug("Validating '%(name)s' object with values: %(values)r", {
        'name': self.obj_name(),
        'values': self.to_dict(),
    })
    errors = ValidationErrorList()

    # Get the right classes (e.g. A for Recordsets with type: 'A')
    try:
        record_list_cls = self.obj_cls_from_name('%sList' % self.type)
        record_cls = self.obj_cls_from_name(self.type)
    except KeyError:
        # Unknown record type - fake a ValidationError and bail out
        e = ValidationError()
        e.path = ['recordset', 'type']
        e.validator = 'value'
        e.validator_value = [self.type]
        e.message = ("'%(type)s' is not a supported Record type"
                     % {'type': self.type})
        # Add it to the list for later
        errors.append(e)
        raise exceptions.InvalidObject(
            "Provided object does not match "
            "schema", errors=errors, object=self)

    # Get any rules that the record type imposes on the record
    changes = record_cls.get_recordset_schema_changes()
    old_fields = {}
    if changes:
        # BUG FIX: arguments are now passed lazily to LOG.debug instead
        # of eagerly %-formatting the message string.
        LOG.debug("Record %s is overriding the RecordSet schema with: %s",
                  record_cls.obj_name(), changes)
        old_fields = deepcopy(self.FIELDS)
        self.FIELDS = utils.deep_dict_merge(self.FIELDS, changes)

    error_indexes = []
    # Copy these for safekeeping
    old_records = deepcopy(self.records)
    # Blank the records for this object with the right list type
    self.records = record_list_cls()

    i = 0
    for record in old_records:
        record_obj = record_cls()
        try:
            # The _from_string() method will throw a ValueError if there
            # is not enough data blobs
            record_obj._from_string(record.data)
        except (ValueError, TypeError, AttributeError):
            # Something broke in the _from_string() method
            # Fake a correct looking ValidationError() object
            e = ValidationError()
            e.path = ['records', i]
            e.validator = 'format'
            e.validator_value = [self.type]
            e.message = ("'%(data)s' is not a '%(type)s' Record" % {
                'data': record.data,
                'type': self.type
            })
            # Add it to the list for later
            errors.append(e)
            error_indexes.append(i)
        except Exception as e:
            # BUG FIX: this previously wrapped an already-%-formatted
            # string in an unbound str.format() call, which is a no-op;
            # use plain string formatting.
            error_message = ('Provided object is not valid. '
                             'Got a %s error with message %s'
                             % (type(e).__name__, six.text_type(e)))
            raise exceptions.InvalidObject(error_message)
        else:
            # Seems to have loaded right - add it to be validated by
            # JSONSchema
            self.records.append(record_obj)
        i += 1

    try:
        # Run the actual validate code
        super(RecordSet, self).validate()
    except exceptions.InvalidObject as e:
        # Something is wrong according to JSONSchema - append our errors
        increment = 0
        # This code below is to make sure we have the index for the record
        # list correct. JSONSchema may be missing some of the objects due
        # to validation above, so this re - inserts them, and makes sure
        # the index is right
        for error in e.errors:
            if len(error.path) > 1 and isinstance(error.path[1], int):
                error.path[1] += increment
                while error.path[1] in error_indexes:
                    increment += 1
                    error.path[1] += 1
        # Add the list from above
        e.errors.extend(errors)
        # Raise the exception
        raise e
    else:
        # If JSONSchema passes, but we found parsing errors,
        # raise an exception
        if len(errors) > 0:
            LOG.debug(
                "Error Validating '%(name)s' object with values: "
                "%(values)r", {
                    'name': self.obj_name(),
                    'values': self.to_dict(),
                })
            raise exceptions.InvalidObject(
                "Provided object does not match "
                "schema", errors=errors, object=self)
    finally:
        if old_fields:
            # Restore the schema the record type temporarily overrode
            self.FIELDS = old_fields
        # Send in the traditional Record objects to central / storage
        self.records = old_records
def validate(self):
    """Validate this RecordSet and each of its records.

    Each record's data is parsed by the type-specific Record class
    (e.g. ``A`` for type 'A'); parse failures are collected as faked
    jsonschema ``ValidationError`` objects. The type class may also
    override ``self.FIELDS`` for the duration of the JSONSchema run;
    the original FIELDS and the original Record objects are always
    restored in the ``finally`` block.

    :raises exceptions.InvalidObject: when the type is unknown or any
        record fails parsing or schema validation.
    """
    LOG.debug("Validating '%(name)s' object with values: %(values)r", {
        'name': self.obj_name(),
        'values': self.to_dict(),
    })
    LOG.debug(list(self.records))
    errors = ValidationErrorList()

    # Get the right classes (e.g. A for Recordsets with type: 'A')
    try:
        record_list_cls = self.obj_cls_from_name('%sList' % self.type)
        record_cls = self.obj_cls_from_name(self.type)
    except (KeyError, ovo_exc.UnsupportedObjectError) as e:
        # NOTE(review): 'e' is unused; _validate_fail presumably raises,
        # otherwise record_cls below would be unbound - confirm.
        err_msg = ("'%(type)s' is not a valid record type" % {
            'type': self.type
        })
        self._validate_fail(errors, err_msg)

    if self.type not in cfg.CONF.supported_record_type:
        err_msg = ("'%(type)s' is not a supported record type" % {
            'type': self.type
        })
        self._validate_fail(errors, err_msg)

    # Get any rules that the record type imposes on the record
    changes = record_cls.get_recordset_schema_changes()
    old_fields = {}
    if changes:
        LOG.debug("Record %s is overriding the RecordSet schema with: %s",
                  record_cls.obj_name(), changes)
        old_fields = deepcopy(self.FIELDS)
        self.FIELDS = utils.deep_dict_merge(self.FIELDS, changes)

    error_indexes = []
    # Copy these for safekeeping
    old_records = deepcopy(self.records)
    # Blank the records for this object with the right list type
    self.records = record_list_cls()

    i = 0
    for record in old_records:
        record_obj = record_cls()
        try:
            record_obj._from_string(record.data)
        # The _from_string() method will throw a ValueError if there is not
        # enough data blobs
        except ValueError as e:
            # Something broke in the _from_string() method
            # Fake a correct looking ValidationError() object
            e = ValidationError()
            e.path = ['records', i]
            e.validator = 'format'
            e.validator_value = [self.type]
            e.message = ("'%(data)s' is not a '%(type)s' Record" % {
                'data': record.data,
                'type': self.type
            })
            # Add it to the list for later
            errors.append(e)
            error_indexes.append(i)
        except TypeError as e:
            # Same treatment as ValueError: fake a ValidationError
            e = ValidationError()
            e.path = ['records', i]
            e.validator = 'format'
            e.validator_value = [self.type]
            e.message = ("'%(data)s' is not a '%(type)s' Record" % {
                'data': record.data,
                'type': self.type
            })
            # Add it to the list for later
            errors.append(e)
            error_indexes.append(i)
        except AttributeError as e:
            # Same treatment as ValueError: fake a ValidationError
            e = ValidationError()
            e.path = ['records', i]
            e.validator = 'format'
            e.validator_value = [self.type]
            e.message = ("'%(data)s' is not a '%(type)s' Record" % {
                'data': record.data,
                'type': self.type
            })
            # Add it to the list for later
            errors.append(e)
            error_indexes.append(i)
        except Exception as e:
            # Anything unexpected aborts validation entirely
            error_message = ('Provided object is not valid. Got a %s error'
                             ' with message %s' % (type(e).__name__,
                                                   six.text_type(e)))
            raise exceptions.InvalidObject(error_message)
        else:
            # Seems to have loaded right - add it to be validated by
            # JSONSchema
            self.records.append(record_obj)
        i += 1

    try:
        # Run the actual validate code
        super(RecordSet, self).validate()
    except exceptions.InvalidObject as e:
        raise e
    else:
        # If JSONSchema passes, but we found parsing errors,
        # raise an exception
        if len(errors) > 0:
            LOG.debug(
                "Error Validating '%(name)s' object with values: "
                "%(values)r", {
                    'name': self.obj_name(),
                    'values': self.to_dict(),
                })
            raise exceptions.InvalidObject(
                "Provided object does not match "
                "schema", errors=errors, object=self)
    finally:
        if old_fields:
            # Restore the schema the record type temporarily overrode
            self.FIELDS = old_fields
        # Send in the traditional Record objects to central / storage
        self.records = old_records
def patch_one(self, zone_id):
    """Update a Zone from a partial request body.

    Handles masters separately (resetting transferred_at to force a new
    transfer when they change), rejects email changes on SECONDARY
    zones, and only increments the serial for PRIMARY zones. Responds
    202 while the zone is PENDING, otherwise 200.

    :param zone_id: ID of the zone (domain) to update.
    :raises NotImplementedError: for 'application/json-patch+json' bodies.
    :raises exceptions.InvalidObject: when changing a SECONDARY's email.
    """
    # TODO(kiall): This needs cleanup to say the least..
    request = pecan.request
    context = request.environ['context']
    body = request.body_dict
    response = pecan.response

    # TODO(kiall): Validate we have a sane UUID for zone_id

    # Fetch the existing zone
    zone = self.central_api.get_domain(context, zone_id)

    # Convert to APIv2 Format
    zone_data = self._view.show(context, request, zone)

    if request.content_type == 'application/json-patch+json':
        # Possible pattern:
        #
        # 1) Load existing zone.
        # 2) Apply patch, maintain list of changes.
        # 3) Return changes, after passing through the code ^ for plain
        #    JSON.
        #
        # Difficulties:
        #
        # 1) "Nested" resources? records inside a recordset.
        # 2) What to do when a zone doesn't exist in the first place?
        # 3) ...?
        #
        # BUG FIX: raising the NotImplemented singleton is a TypeError at
        # runtime; NotImplementedError is the intended exception class.
        raise NotImplementedError('json-patch not implemented')
    else:
        zone_data = utils.deep_dict_merge(zone_data, body)

    # Validate the new set of data
    self._resource_schema.validate(zone_data)

    # Unpack the values
    values = self._view.load(context, request, body)

    zone.set_masters(values.pop('masters', []))

    # If masters are specified then we set zone.transferred_at to None
    # which will cause a new transfer
    if 'attributes' in zone.obj_what_changed():
        zone.transferred_at = None

    # Update and persist the resource
    zone.update(values)

    if zone.type == 'SECONDARY' and 'email' in zone.obj_what_changed():
        msg = "Changed email is not allowed."
        raise exceptions.InvalidObject(msg)

    # Only PRIMARY zones bump their serial on update
    increment_serial = zone.type == 'PRIMARY'

    zone = self.central_api.update_domain(
        context, zone, increment_serial=increment_serial)

    if zone.status == 'PENDING':
        response.status_int = 202
    else:
        response.status_int = 200

    return self._view.show(context, request, zone)