def __validate_required_fields__(self, account_info):
    """
    Validate that the required password fields are present.

    Raises DataValidationFailed if the old or the new password
    attribute is missing (or None) in account_info.configuration.
    """
    # Consistency fix: the original checked hasattr() with the KEY
    # constants but then read hard-coded attribute names
    # (.password / .new_password). Use the constants for both so the
    # lookup and the error message can never drift apart.
    # Assumes OLD_PASSWORD_KEY/NEW_PASSWORD_KEY name those attributes.
    if getattr(account_info.configuration, OLD_PASSWORD_KEY, None) is None:
        error = "Attribute '%s' is required" % OLD_PASSWORD_KEY
        raise DataValidationFailed(error)
    if getattr(account_info.configuration, NEW_PASSWORD_KEY, None) is None:
        error = "Attribute '%s' is required" % NEW_PASSWORD_KEY
        raise DataValidationFailed(error)
def verify_attribute_type(column_name, column_data, request_data):
    """
    Verify that request_data's type matches the schema type of a column.

    Lists must be lists, dicts must be dicts; a single value is accepted
    for a scalar column even when wrapped in a one-element list. On a
    match, container element types and valid values are also checked.

    Raises DataValidationFailed on any mismatch.
    """
    data = request_data
    data_type = type(data)
    valid_types = column_data.type.python_types
    if column_data.is_list:
        # If column is a list, data must be a list
        valid_types = [list]
    elif column_data.is_dict:
        # If column is a dictionary, data must be a dictionary
        valid_types = [dict]
    elif data_type is list:
        # If data is a list but column is not,
        # we expect a single value in the list
        if len(data) == 1:
            data = data[0]
            data_type = type(data)
    # Guard clause instead of the original's pointless
    # "except DataValidationFailed as e: raise e" wrapper, which only
    # reset the traceback without adding behavior.
    if data_type not in valid_types:
        error = "Attribute type mismatch for column %s" % column_name
        raise DataValidationFailed(error)
    # Check each value's type for elements in lists and dictionaries
    if column_data.n_max > 1:
        verify_container_values_type(column_name, column_data,
                                     request_data)
    # Now check for invalid values
    verify_valid_attribute_values(data, column_data, column_name)
def check_config_type(self, request_type):
    """Reject any configuration type other than running/startup."""
    app_log.debug('Requested config type: %s', request_type)
    allowed = (CONFIG_TYPE_RUNNING, CONFIG_TYPE_STARTUP)
    if request_type not in allowed:
        raise DataValidationFailed(
            "Invalid configuration type. Configuration "
            "types allowed: %s, %s" % allowed)
def post(self):
    """Handle a login POST: authenticate and set the response status."""
    try:
        app_log.debug("Executing Login POST...")
        username = self.get_argument(USERNAME_KEY)
        check_user_login_authorization(username)
        # Success sets 200 OK; failure is reported as an auth error
        if userauth.handle_user_login(self):
            self.set_status(httplib.OK)
        else:
            raise AuthenticationFailed('invalid username/password '
                                       'combination')
    except MissingArgumentError:
        self.on_exception(DataValidationFailed('Missing username or '
                                               'password'))
    except APIException as e:
        self.on_exception(e)
    except Exception as e:
        self.on_exception(e)
    self.finish()
def verify_put_data(data, resource, schema, idl):
    """
    Verify a PUT payload against the target resource's schema.

    Returns the verified config + reference data dictionary.
    Raises DataValidationFailed when the configuration key is missing,
    when column validation fails, or when referenced_by is supplied for
    a non-root top-level resource (not allowed in PUT).
    """
    if OVSDB_SCHEMA_CONFIG not in data:
        raise DataValidationFailed("Missing %s data" % OVSDB_SCHEMA_CONFIG)
    _data = data[OVSDB_SCHEMA_CONFIG]

    # We need to verify the System table when the URI has no sub-resource
    if resource.next is None:
        resource_verify = resource
    else:
        resource_verify = resource.next

    # Get the targeted row
    row = idl.tables[resource_verify.table].rows[resource_verify.row]

    # Verify config and reference column data.  The original wrapped
    # this in "except DataValidationFailed as e: raise e", which only
    # reset the traceback (py2 `raise e`); the wrapper is removed.
    verified_data = {}
    verified_config_data = verify_config_data(resource_verify, _data,
                                              schema, REQUEST_TYPE_UPDATE,
                                              row)
    verified_data.update(verified_config_data)

    verified_reference_data = verify_forward_reference(
        _data, resource_verify, schema, idl)
    verified_data.update(verified_reference_data)

    is_root = schema.ovs_tables[resource_verify.table].is_root

    # referenced_by is not allowed in PUT for non-root top-level tables
    if resource.relation == OVSDB_SCHEMA_TOP_LEVEL and not is_root:
        if OVSDB_SCHEMA_REFERENCED_BY in data:
            app_log.info('referenced_by is not allowed for PUT')
            error = "Attribute %s not allowed for PUT"\
                % OVSDB_SCHEMA_REFERENCED_BY
            raise DataValidationFailed(error)

    # data verified
    return verified_data
def validate_keywords(self, query_args):
    """Raise DataValidationFailed if query_args has unknown log filters."""
    if not query_args:
        return
    keyword_groups = (self.FILTER_KEYWORDS[LOGS_PAGINATION],
                      self.FILTER_KEYWORDS[LOGS_OPTIONS],
                      self.FILTER_KEYWORDS[LOGS_MATCHES])
    error_fields = [arg for arg in query_args.iterkeys()
                    if not any(arg in group for group in keyword_groups)]
    if error_fields:
        raise DataValidationFailed("Invalid log filters %s" % error_fields)
def verify_container_values_type(column_name, column_data, request_data):
    """
    Verify the type of every element of a list column, or of every
    key/value of a dict column.

    Raises DataValidationFailed on the first mismatching element,
    unknown dict key, or string value that cannot be converted to the
    type declared in the column's kvs definition.
    """
    if column_data.is_list:
        # Every list element must be one of the column's python types
        for value in request_data:
            if type(value) not in column_data.type.python_types:
                error = "Value type mismatch in column %s" % column_name
                raise DataValidationFailed(error)
    elif column_data.is_dict:
        for key, value in request_data.iteritems():
            # Check if request data has unknown keys for columns other than
            # those in OVSDB_COMMON_COLUMNS (which should accept any keys).
            # Note: common columns which do not require key validation should
            # be added to OVSDB_COMMON_COLUMNS array.
            if column_name not in OVSDB_COMMON_COLUMNS:
                if column_data.kvs and key not in column_data.kvs:
                    error = "Unknown key %s for column %s" % (key,
                                                              column_name)
                    raise DataValidationFailed(error)
            value_type = type(value)
            # Values in dict must match JSON schema
            if value_type in column_data.value_type.python_types:
                # If they match, they might be strings that represent other
                # types, so each value must be checked if kvs type exists
                if value_type in ovs_types.StringType.python_types \
                        and column_data.kvs and key in column_data.kvs:
                    kvs_value_type = column_data.kvs[key]['type']
                    converted_value = \
                        convert_string_to_value_by_type(value,
                                                        kvs_value_type)
                    # None means the string did not parse as the kvs type
                    if converted_value is None:
                        error = "Value type mismatch for key %s in column %s"\
                            % (key, column_name)
                        raise DataValidationFailed(error)
            else:
                error = "Value type mismatch for key %s in column %s"\
                    % (key, column_name)
                raise DataValidationFailed(error)
def _get_row_from_uri(uri, schema, idl):
    """Resolve a reference URI to its ovs.db.idl Row instance."""
    parsed = parse.parse_url_path(uri, schema, idl)
    if parsed is None:
        raise DataValidationFailed("Reference %s could not be identified"
                                   % uri)
    # Walk to the terminal resource of the parsed chain; that is the
    # row being referenced.
    while parsed.next is not None:
        parsed = parsed.next
    return utils.get_row_from_resource(parsed, idl)
def get_valid_key_values(key_values, schema, resource):
    """
    Return key_values validated against the schema's valid keys.

    Raises DataValidationFailed at the first invalid key.
    """
    valid_keys = _get_valid_keys(schema, resource)
    validated = []
    for candidate in key_values:
        if candidate not in valid_keys:
            raise DataValidationFailed("Invalid key: %s" % candidate)
        validated.append(candidate)
    return validated
def validate_args_data(self, query_args):
    """
    Validate the log query arguments: offset, limit, priority (option
    and match), since and until.

    All problems are collected and reported together via a single
    DataValidationFailed listing every error message.
    """
    error_messages = []
    time_relative_keywords = ["yesterday", "now", "today"]
    time_keywords = [
        "day ago", "days ago",
        "minute ago", "minutes ago",
        "hour ago", "hours ago"
    ]
    # offset must be a non-negative integer
    offset = getutils.get_query_arg(REST_QUERY_PARAM_OFFSET, query_args)
    if offset is not None:
        if not (offset.isdigit() and int(offset) >= 0):
            error_messages.append("Only positive integers are allowed" +
                                  " for offset")
    # limit must be an integer in [1, MAXLIMIT]
    limit = getutils.get_query_arg(REST_QUERY_PARAM_LIMIT, query_args)
    if limit is not None:
        if not (limit.isdigit() and int(limit) > 0 and
                int(limit) <= MAXLIMIT):
            # Fixed: the original message read "from 1 to%s" (missing
            # space before the limit value).
            error_messages.append("Valid range for limit is from 1 to "
                                  "%s" % MAXLIMIT)
    priority = getutils.get_query_arg(REST_LOGS_PARAM_PRIORITY_OPTION,
                                      query_args)
    if priority is not None:
        error_messages = self.validate_priority(priority, error_messages)
    priority_match = getutils.get_query_arg(REST_LOGS_PARAM_PRIORITY_MATCH,
                                            query_args)
    if priority_match is not None:
        error_messages = self.validate_priority(priority_match,
                                                error_messages)
    since_arg = getutils.get_query_arg(REST_LOGS_PARAM_SINCE, query_args)
    if since_arg is not None:
        error_messages = self.validate_since_until(since_arg,
                                                   error_messages,
                                                   time_keywords,
                                                   time_relative_keywords)
    until_arg = getutils.get_query_arg(REST_LOGS_PARAM_UNTIL, query_args)
    if until_arg is not None:
        error_messages = self.validate_since_until(until_arg,
                                                   error_messages,
                                                   time_keywords,
                                                   time_relative_keywords)
    if error_messages:
        raise DataValidationFailed("Incorrect data for arguments: %s"
                                   % error_messages)
def validate_json(self, json_data, operation):
    """
    Validate json_data against the JSON schema, then check required
    categorization keys for create/update operations.

    Raises DataValidationFailed with the schema error and the failing
    field (if any) when validation fails.
    """
    # Validate Schema
    try:
        self.validator.validate(json_data)
    except ValidationError as e:
        app_log.debug("Error: %s" % e.message)
        field = None
        if e.path:
            field = e.path[-1]
        # Fixed "% s" typo in the original format string
        error = "Json Schema Error %s. Field: %s" % (e.message, field)
        raise DataValidationFailed(error)
    # Validate required categorization keys
    if operation in (REQUEST_TYPE_CREATE, REQUEST_TYPE_UPDATE):
        self.__validate_category_keys__(json_data)
def prepare(self):
    """
    Per-request setup: authentication, OVSDB connectivity, URL
    resolution, filter/If-Match/body-size checks.

    NOTE(review): several branches call self.finish() without
    returning, so execution appears to continue afterwards — confirm
    whether that is intended (Tornado does not stop the coroutine on
    finish()).
    """
    try:
        # Call parent's prepare to check authentication
        super(OVSDBAPIHandler, self).prepare()
        # Check ovsdb connection before each request
        if not self.ref_object.manager.connected:
            self.set_status(httplib.SERVICE_UNAVAILABLE)
            self.finish()
        self.resource_path = parse_url_path(self.request.path,
                                            self.schema,
                                            self.idl,
                                            self.request.method)
        if self.resource_path is None:
            self.set_status(httplib.NOT_FOUND)
            self.finish()
        else:
            # Query filters are only meaningful on reads
            filters = get_filters_args(self.request.query_arguments,
                                       self.schema, self.resource_path)
            if self.request.method != \
                    REQUEST_TYPE_READ and len(filters) > 0:
                raise ParameterNotAllowed("argument filter is only "
                                          "allowed in %s" %
                                          REQUEST_TYPE_READ)
            # If Match support
            match = yield self.process_if_match()
            app_log.debug("If-Match result: %s" % match)
            if not match:
                self.finish()
            # Reject oversized bodies on write methods
            if self.request.method == REQUEST_TYPE_CREATE \
                    or self.request.method == REQUEST_TYPE_UPDATE \
                    or self.request.method == REQUEST_TYPE_PATCH:
                if int(self.request.headers['Content-Length']) \
                        > MAX_BODY_SIZE:
                    raise DataValidationFailed("Content-Length too long")
    except APIException as e:
        self.on_exception(e)
        self.finish()
    except Exception, e:
        self.on_exception(e)
        self.finish()
def validate_update(self, username, account_info):
    """
    Validate an account update request.

    Checks that required fields are present, that the user exists,
    and that the target account is not root. Raises
    DataValidationFailed or NotAuthenticated on failure.
    """
    app_log.debug("Validating account info update...")
    self.__validate_required_fields__(account_info)
    # Unknown or empty usernames are not authenticated
    if not (username and userutils.user_exists(username)):
        raise NotAuthenticated
    # Avoid update a root user
    if username == "root":
        error = "Permission denied. Cannot update the root user."
        raise DataValidationFailed(error)
def validate_selector(self, selector):
    """
    Validate the 'selector' query argument.

    Raises DataValidationFailed for unknown selectors, and
    ParameterNotAllowed when a write method uses a selector without
    an If-Match header.
    """
    if not selector:
        return
    if selector not in VALID_CATEGORIES:
        raise DataValidationFailed("Invalid selector '%s'" % selector)
    # PUT, POST, DELETE, PATCH can only use selector param in
    # combination with If-Match header
    write_methods = [REQUEST_TYPE_CREATE, REQUEST_TYPE_UPDATE,
                     REQUEST_TYPE_PATCH, REQUEST_TYPE_DELETE]
    has_if_match = HTTP_HEADER_CONDITIONAL_IF_MATCH in self.request.headers
    if not has_if_match and self.request.method in write_methods:
        raise ParameterNotAllowed(
            "Argument '%s' is only allowed "
            "in combination with If-Match "
            "header for the following methods: "
            "'%s', '%s', '%s', '%s'" %
            (REST_QUERY_PARAM_SELECTOR,
             REQUEST_TYPE_CREATE, REQUEST_TYPE_UPDATE,
             REQUEST_TYPE_PATCH, REQUEST_TYPE_DELETE))
def update_resource_keys(resource, schema, idl, data=None):
    """
    Update the keys categories for a given resource.

    Parameters:
        resource: opsrest.Resource instance
        schema: RestSchema
        idl: Idl instance
        data: json data given if the resource is new and doesn't
              have a row.
    """
    # Prefer explicit data (new resource); fall back to the live row.
    if data is not None:
        row = data
    elif resource.row:
        row = get_row_from_resource(resource, idl)
    else:
        row = None
    try:
        resource.keys = update_category_keys(resource.keys,
                                             row,
                                             idl,
                                             schema,
                                             resource.table)
    except Exception as e:
        raise DataValidationFailed(str(e))
def create_patch(data):
    """
    Build a jsonpatch.JsonPatch from data and sanity-check its ops.

    Returns (patch, modified) where modified is True if any operation
    would change the row (anything other than 'test').

    Raises DataValidationFailed for malformed, empty, or invalid
    patches.
    """
    try:
        patch = jsonpatch.JsonPatch(data)
    # Was a bare "except:", which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        raise DataValidationFailed("Malformed JSON patch")
    app_log.debug("PATCH Created patch object %s" % patch.to_string())
    if not patch:
        raise DataValidationFailed("Empty JSON patch")
    # Sanity check for patch operations
    # NOTE supposedly jsonpatch verifies this and the resulting patch
    # should evaluate to True if it contains at least one operation, but
    # this doesn't seem to be the case in practice as any valid JSON is
    # accepted, therefore the next sanity check verifies the patch's
    # operations.
    common_keys = (PATCH_KEY_OP, PATCH_KEY_PATH)
    # Extra required keys per operation type
    operation_keys = {
        PATCH_OP_TEST: (PATCH_KEY_VALUE, ),
        PATCH_OP_REMOVE: (),
        PATCH_OP_ADD: (PATCH_KEY_VALUE, ),
        PATCH_OP_REPLACE: (PATCH_KEY_VALUE, ),
        PATCH_OP_MOVE: (PATCH_KEY_FROM, ),
        PATCH_OP_COPY: (PATCH_KEY_FROM, ),
    }
    modified = False
    for patch_op in patch.patch:
        if PATCH_KEY_OP not in patch_op:
            raise DataValidationFailed("Missing PATCH operation key")
        current_op = patch_op[PATCH_KEY_OP]
        if current_op not in operation_keys:
            raise DataValidationFailed("PATCH operation not supported")
        # NOTE add any other non-modifying op to this condition
        if current_op != PATCH_OP_TEST:
            modified = True
        valid_keys = set(common_keys)
        valid_keys.update(operation_keys[current_op])
        op_keys = set(patch_op.keys())
        unknown_keys = op_keys.difference(valid_keys)
        if unknown_keys:
            raise DataValidationFailed("Invalid keys '%s' for operation '%s'"
                                       % (list(unknown_keys), current_op))
        missing_keys = valid_keys.difference(op_keys)
        if missing_keys:
            raise DataValidationFailed("Missing keys '%s' for operation '%s'"
                                       % (list(missing_keys), current_op))
    if modified:
        app_log.debug("PATCH will modify row")
    return (patch, modified)
def post_resource(data, resource, schema, txn, idl):
    """
    Create a new row for the target resource within transaction txn.

    /system/bridges: POST allowed as we are adding a new Bridge
    to a child table
    /system/ports: POST allowed as we are adding a new Port to
    top level table
    /system/vrfs/vrf_default/bgp_routers: POST allowed as we are
    adding a back referenced resource
    /system/bridges/bridge_normal/ports: POST NOT allowed as we are
    attempting to add a Port as a reference on bridge

    Returns an OvsdbTransactionResult with the commit result and the
    new row's index.
    """
    if resource is None or resource.next is None:
        app_log.info("POST is not allowed on System table")
        raise MethodNotAllowed
    # get the last resource pair
    while True:
        if resource.next.next is None:
            break
        resource = resource.next
    utils.update_resource_keys(resource.next, schema, idl,
                               data[OVSDB_SCHEMA_CONFIG])
    if verify.verify_http_method(resource, schema,
                                 REQUEST_TYPE_CREATE) is False:
        raise MethodNotAllowed
    # verify data
    try:
        verified_data = verify.verify_data(data, resource, schema, idl,
                                           REQUEST_TYPE_CREATE)
    except DataValidationFailed as e:
        app_log.debug(e)
        raise e
    app_log.debug("adding new resource to " + resource.next.table +
                  " table")
    if resource.relation == OVSDB_SCHEMA_CHILD:
        # create new row, populate it with data
        # add it as a reference to the parent resource
        new_row = utils.setup_new_row_by_resource(resource.next,
                                                  verified_data,
                                                  schema, txn, idl)
        ref = schema.ovs_tables[resource.table].references[resource.column]
        if ref.kv_type:
            # key-value reference: keyed by the declared keyname column
            keyname = ref.keyname
            utils.add_kv_reference(verified_data[keyname], new_row,
                                   resource, idl)
        else:
            utils.add_reference(new_row, resource, idl)
    elif resource.relation == OVSDB_SCHEMA_BACK_REFERENCE:
        # row for a back referenced item contains the parent's reference
        # in the verified data
        new_row = utils.setup_new_row_by_resource(resource.next,
                                                  verified_data,
                                                  schema, txn, idl)
    elif resource.relation == OVSDB_SCHEMA_TOP_LEVEL:
        new_row = utils.setup_new_row_by_resource(resource.next,
                                                  verified_data,
                                                  schema, txn, idl)
        # a non-root table entry MUST be referenced elsewhere
        if OVSDB_SCHEMA_REFERENCED_BY in verified_data:
            for reference in verified_data[OVSDB_SCHEMA_REFERENCED_BY]:
                utils.add_reference(new_row, reference, idl)
    # Custom (plugin) validators run before committing the transaction
    try:
        utils.exec_validators_with_resource(idl, schema, resource,
                                            REQUEST_TYPE_CREATE)
    except ValidationError as e:
        app_log.debug("Custom validations failed:")
        app_log.debug(e.error)
        raise DataValidationFailed(e.error)
    index = utils.create_index(schema, verified_data, resource, new_row)
    result = txn.commit()
    return OvsdbTransactionResult(result, index)
def verify_config_data(resource, data, schema, request_type, row=None,
                       get_all_errors=False):
    """
    Validate config-category attributes of the payload against the
    resource's schema keys.

    Returns the dict of verified (writable) config attributes. With
    get_all_errors=True, errors are accumulated and raised together at
    the end; otherwise the first error raises immediately.
    """
    config_keys = resource.keys[OVSDB_SCHEMA_CONFIG]
    reference_keys = resource.keys[OVSDB_SCHEMA_REFERENCE]
    verified_config_data = {}
    errors = []
    # Check for extra or unknown attributes
    unknown_attribute = find_unknown_attribute(data, config_keys,
                                               reference_keys)
    if unknown_attribute is not None:
        error = "Unknown configuration attribute: %s" % unknown_attribute
        if get_all_errors:
            errors.append(error)
        else:
            raise DataValidationFailed(error)
    non_mutable_attributes = get_non_mutable_attributes(resource, schema)
    # Check for all required/valid attributes to be present
    for column_name in config_keys:
        is_optional = config_keys[column_name].is_optional
        if column_name in data:
            try:
                verify_attribute_type(column_name,
                                      config_keys[column_name],
                                      data[column_name])
                verify_attribute_range(column_name,
                                       config_keys[column_name],
                                       data[column_name])
            except DataValidationFailed as e:
                if get_all_errors:
                    errors.append(e.detail)
                else:
                    raise e
            if request_type == REQUEST_TYPE_CREATE:
                verified_config_data[column_name] = data[column_name]
            elif request_type in (REQUEST_TYPE_UPDATE,
                                  REQUEST_TYPE_PATCH):
                if column_name not in non_mutable_attributes:
                    verified_config_data[column_name] = data[column_name]
                else:
                    # Check if immutable attribute is being updated
                    if row is not None:
                        if is_optional:
                            # Optional columns are stored as one-element
                            # lists, so compare against a wrapped value
                            column_list = []
                            column_list.append(data[column_name])
                            if row.__getattr__(column_name) != column_list:
                                error = "Attribute list '%s' cannot be modified" % column_name
                                raise DataValidationFailed(error)
                        elif row.__getattr__(column_name) != data[column_name]:
                            error = "Attribute '%s' cannot be modified" % column_name
                            raise DataValidationFailed(error)
        else:
            # PUT ignores immutable attributes, otherwise they are
            # required. If it's a PUT request, and the field is a mutable
            # and mandatory, but not found, then it's an error.
            #
            # POST requires all attributes. If it's a mandatory field not
            # found then it's an error.
            if request_type in (REQUEST_TYPE_UPDATE,
                                REQUEST_TYPE_PATCH) and \
                    column_name in non_mutable_attributes:
                continue
            if not is_optional:
                error = "Attribute %s is required" % column_name
                if get_all_errors:
                    errors.append(error)
                else:
                    raise DataValidationFailed(error)
    if len(errors):
        raise DataValidationFailed(errors)
    else:
        return verified_config_data
def verify_valid_attribute_values(request_data, column_data, column_name):
    """
    Verify the values of an attribute against its kvs definition or
    enum of allowed values.

    Raises DataValidationFailed with accumulated details on unknown or
    missing keys, or values outside the declared enum.
    """
    valid = True
    error_details = ""
    error_message = "Attribute value is invalid for column '%s'." % column_name
    # If data has key-values dict defined, check for missing/invalid keys
    # It's assumed type is validated, meaning kvs is defined for dicts only
    if column_data.kvs:
        valid_keys = set(column_data.kvs.keys())
        data_keys = set(request_data.keys())
        unknown_keys = []
        # Common columns accept arbitrary keys, so skip the unknown check
        if column_name not in OVSDB_COMMON_COLUMNS:
            unknown_keys = data_keys.difference(valid_keys)
        missing_keys = valid_keys.difference(data_keys)
        if unknown_keys:
            error_details += "Unknown keys: '%s'. " % list(unknown_keys)
        if missing_keys:
            # Only non-optional keys count as truly missing
            true_missing_keys = []
            for key in missing_keys:
                if not column_data.kvs[key]["is_optional"]:
                    true_missing_keys.append(key)
            if true_missing_keys:
                missing_keys = true_missing_keys
                error_details += "Missing keys: '%s'. " % list(missing_keys)
            else:
                missing_keys = []
        if unknown_keys or missing_keys:
            valid = False
        if valid:
            # Now that keys have been checked,
            # verify their values are valid
            for key, value in column_data.kvs.iteritems():
                if key in request_data and value['enum']:
                    enum = set(value['enum'].as_list())
                    data_value = request_data[key]
                    # Strings may encode other types; convert before the
                    # enum membership check
                    if type(data_value) \
                            in ovs_types.StringType.python_types:
                        data_value = \
                            convert_string_to_value_by_type(data_value,
                                                            value['type'])
                    if not is_value_in_enum(data_value, enum):
                        valid = False
                        error_details += "Invalid value for key '%s'. " % key
                        break
    # If data has an enum defined, check for a valid value
    elif column_data.enum:
        enum = set(column_data.enum.as_list())
        valid = is_value_in_enum(request_data, enum)
    if not valid:
        if error_details:
            error_message += ": " + error_details
        raise DataValidationFailed(error_message)
def __validate_category_keys__(self, json_data):
    """Ensure the request JSON carries the configuration category key."""
    if OVSDB_SCHEMA_CONFIG in json_data:
        return
    raise DataValidationFailed("Missing configuration key")
def verify_attribute_range(column_name, column_data, request_data):
    """
    Verify element counts and value ranges for an attribute.

    String values are range-checked by length; dict values may have
    per-key ranges in the column's kvs definition. Raises
    DataValidationFailed on any out-of-range value.
    """
    # We assume verify_attribute_type has already been called,
    # so request_data type must be correct (save for a small
    # exception if column is list)
    data_type = type(request_data)
    # Check elements in a list
    if column_data.is_list:
        # Exception: a single value might be accepted
        # by OVSDB as a single element list
        request_list = []
        if data_type is not list:
            request_list.append(request_data)
        else:
            request_list = request_data
        request_len = len(request_list)
        if request_len < column_data.n_min or \
                request_len > column_data.n_max:
            error = "List number of elements is out of range for column %s" % \
                column_name
            raise DataValidationFailed(error)
        else:
            for element in request_list:
                # We usually check the value itself
                # But for a string, we check its length instead
                value = element
                if type(element) in ovs_types.StringType.python_types:
                    value = len(element)
                if (value < column_data.rangeMin or
                        value > column_data.rangeMax):
                    error = "List element %s is out of range for column %s" % \
                        (element, column_name)
                    raise DataValidationFailed(error)
    # Check elements in a dictionary
    elif column_data.is_dict:
        request_len = len(request_data)
        if request_len < column_data.n_min or \
                request_len > column_data.n_max:
            error = "Dict number of elements is out of range for column %s" % \
                column_name
            raise DataValidationFailed(error)
        else:
            for key, data in request_data.iteritems():
                # First check the key
                # TODO is this necessary? Valid keys are verified prior
                # to this
                value = key
                if type(key) in ovs_types.StringType.python_types:
                    value = len(key)
                if (value < column_data.rangeMin or
                        value > column_data.rangeMax):
                    error = "Key %s's value is out of range for column %s" % \
                        (key, column_name)
                    raise DataValidationFailed(error)
                # Now check ranges for values in dictionary
                # Skip range check for bools
                if type(data) is bool:
                    continue
                value = data
                min_ = column_data.valueRangeMin
                max_ = column_data.valueRangeMax
                # If kvs is defined, ranges should be taken from it
                if column_data.kvs and key in column_data.kvs:
                    # Skip range check for booleans
                    if column_data.kvs[key]['type'] == ovs_types.BooleanType:
                        continue
                    else:
                        min_ = column_data.kvs[key]['rangeMin']
                        max_ = column_data.kvs[key]['rangeMax']
                # If value is a string, it might represent values of other
                # types and therefore it needs to be converted.
                # NOTE(review): this reads column_data.kvs[key]
                # unconditionally — if kvs is empty or key is absent this
                # raises KeyError/TypeError instead of
                # DataValidationFailed; confirm callers guarantee kvs
                # membership for string values.
                if type(value) in ovs_types.StringType.python_types:
                    column_ovs_type = column_data.kvs[key]['type']
                    value = \
                        convert_string_to_value_by_type(value,
                                                        column_ovs_type)
                # If it was a string all along or if after conversion it's
                # still a string, its length range is checked instead
                if type(value) in ovs_types.StringType.python_types:
                    value = len(value)
                if (value < min_ or value > max_):
                    error = "Dictionary value %s is out of range " % data + \
                        "for key %s in column %s" % (key, column_name)
                    raise DataValidationFailed(error)
    # Check single elements (non-list/non-dictionary)
    # Except boolean, as there's no range for them
    elif data_type not in ovs_types.BooleanType.python_types:
        # Exception: if column is not a list,
        # a single value list is accepted
        if data_type is list:
            value = request_data[0]
            data_type = type(value)
        else:
            value = request_data
        if data_type in ovs_types.StringType.python_types:
            value = len(value)
        if value < column_data.rangeMin or value > column_data.rangeMax:
            error = "Attribute value is out of range for column %s" % \
                column_name
            raise DataValidationFailed(error)
def verify_forward_reference(data, resource, schema, idl):
    """
    Converts the forward reference URIs to corresponding Row references.

    Parameters:
        data - post/put data
        resource - Resource object being accessed
        schema - restparser schema object
        idl - ovs.db.idl.Idl object

    Returns a dict mapping reference column names to a Row, a list of
    Rows, or a key->Row dict, depending on the column's schema.
    """
    reference_keys = resource.keys[OVSDB_SCHEMA_REFERENCE]
    verified_references = {}
    # check for invalid keys: only config-category, non-parent
    # references may be set through the payload
    for key in reference_keys:
        if key in data:
            category = reference_keys[key].category
            relation = reference_keys[key].relation
            if category != OVSDB_SCHEMA_CONFIG or \
                    relation == 'parent':
                error = "Invalid reference: %s" % key
                raise DataValidationFailed(error)
    for key in reference_keys:
        if key in data:
            # this is either a URI or list of URIs or dictionary
            table_schema = schema.ovs_tables[resource.table]
            kv_type = table_schema.references[key].kv_type
            _refdata = data[key]
            notList = False
            # Verify if input is of DictType
            if kv_type and not isinstance(_refdata, types.DictType):
                error = "Reference needs to be a dictionary %s" % key
                raise DataValidationFailed(error)
            # A lone URI is normalized to a one-element list
            if not isinstance(_refdata, types.ListType) and not kv_type:
                notList = True
                _refdata = [_refdata]
            # check range
            _min = reference_keys[key].n_min
            _max = reference_keys[key].n_max
            if len(_refdata) < _min or len(_refdata) > _max:
                error = "Reference list is out of range for key %s" % key
                raise DataValidationFailed(error)
            if kv_type:
                references = {}
                key_type = table_schema.references[key].kv_key_type.name
                # TODO: Support other types
                for k, v in _refdata.iteritems():
                    if key_type == INTEGER:
                        k = int(k)
                    row = _get_row_from_uri(v, schema, idl)
                    references.update({k: row})
            else:
                references = []
                for uri in _refdata:
                    row = _get_row_from_uri(uri, schema, idl)
                    references.append(row)
                # Undo the single-URI normalization above
                if notList:
                    references = references[0]
            verified_references[key] = references
    return verified_references
def verify_referenced_by(data, resource, schema, idl):
    '''
    subroutine to validate referenced_by uris/attribute JSON
    {
        "referenced_by": [
            {
                "uri": "URI1",
                "attributes": [
                    "a",
                    "b"
                ]
            },
            {
                "uri": "URI2"
            },
            {
                "uri": "URI3",
                "attributes": []
            }
        ]
    }

    Returns {"referenced_by": [resources]} where each resource has its
    column set to the attribute(s) through which the new row will be
    referenced. Raises DataValidationFailed on any invalid uri or
    attribute.
    '''
    table = resource.table
    verified_referenced_by = {OVSDB_SCHEMA_REFERENCED_BY: []}
    for item in data:
        uri = item['uri']
        attributes = None
        if 'attributes' in item:
            attributes = item['attributes']
        # verify URI
        uri_resource = parse.parse_url_path(uri, schema, idl,
                                            REQUEST_TYPE_CREATE)
        if uri_resource is None:
            error = "referenced_by resource error"
            raise DataValidationFailed(error)
        # go to the last resource
        while uri_resource.next is not None:
            uri_resource = uri_resource.next
        if uri_resource.row is None:
            app_log.debug('uri: ' + uri + ' not found')
            error = "referenced_by resource error"
            raise DataValidationFailed(error)
        # attributes
        references = uri_resource.keys[OVSDB_SCHEMA_REFERENCE]
        reference_keys = references.keys()
        if attributes is not None and len(attributes) > 0:
            for attribute in attributes:
                if attribute not in reference_keys:
                    error = "Attribute %s not found" % attribute
                    raise DataValidationFailed(error)
                # check attribute is not a parent or child.
                # Bug fix: the original compared with "is not 'reference'",
                # an identity check on a string literal that is not
                # guaranteed to work; use inequality instead.
                if references[attribute].relation != 'reference':
                    error = "Attribute should be a reference"
                    raise DataValidationFailed(error)
            # if attribute list has only one element, make it a non-list
            # to keep it consistent with single attribute case (that need
            # not be mentioned)
            if len(attributes) == 1:
                attributes = attributes[0]
        else:
            # find the lone attribute referencing the target table
            _found = False
            for key, value in references.iteritems():
                if value.ref_table == table:
                    if _found:
                        error = "multiple attributes possible, specify one"
                        raise DataValidationFailed(error)
                    else:
                        _found = True
                        attributes = key
        # found the uri and attributes
        uri_resource.column = attributes
        verified_referenced_by[OVSDB_SCHEMA_REFERENCED_BY].append(
            uri_resource)
    return verified_referenced_by
def delete_resource(resource, schema, txn, idl):
    """
    Delete the row(s) targeted by resource within transaction txn.

    Handles child, back-referenced, and top-level relations, removing
    parent references where needed. Returns an OvsdbTransactionResult,
    or None when the URI targets the System table itself.
    """
    if resource.next is None:
        return None
    # get the last resource pair
    while True:
        if resource.next.next is None:
            break
        resource = resource.next
    # Check for invalid resource deletion
    if verify.verify_http_method(resource, schema,
                                 REQUEST_TYPE_DELETE) is False:
        raise MethodNotAllowed
    # Custom (plugin) validators run before any row is touched
    try:
        utils.exec_validators_with_resource(idl, schema, resource,
                                            REQUEST_TYPE_DELETE)
    except ValidationError as e:
        app_log.debug("Custom validations failed:")
        app_log.debug(e.error)
        raise DataValidationFailed(e.error)
    if resource.relation == OVSDB_SCHEMA_CHILD:
        if resource.next.row is None:
            # No specific row: clear the whole child column and delete
            # every row it referenced
            parent = idl.tables[resource.table].rows[resource.row]
            rows = parent.__getattr__(resource.column)
            if isinstance(rows, dict):
                rows = rows.values()
                parent.__setattr__(resource.column, {})
            elif isinstance(rows, ovs.db.idl.Row):
                rows = [rows]
                parent.__setattr__(resource.column, None)
            else:
                parent.__setattr__(resource.column, [])
            # delete rows from the table
            while len(rows):
                row = rows.pop()
                row.delete()
        else:
            row = utils.delete_reference(resource.next, resource,
                                         schema, idl)
            row.delete()
    elif resource.relation == OVSDB_SCHEMA_BACK_REFERENCE:
        if resource.next.row is None:
            # No specific row: find the column in the child table that
            # points back at this parent, then delete all its children
            refcol = None
            parent = idl.tables[resource.table].rows[resource.row]
            refkeys = schema.ovs_tables[resource.next.table].references
            for key, value in refkeys.iteritems():
                if (value.relation == OVSDB_SCHEMA_PARENT and
                        value.ref_table == resource.table):
                    refcol = key
                    break
            children = []
            for row in idl.tables[resource.next.table].rows.itervalues():
                if row.__getattr__(refcol) == parent:
                    children.append(row.uuid)
            for child in children:
                row = idl.tables[resource.next.table].rows[child]
                row.delete()
        else:
            row = utils.get_row_from_resource(resource.next, idl)
            row.delete()
    elif resource.relation == OVSDB_SCHEMA_TOP_LEVEL:
        row = utils.delete_all_references(resource.next, schema, idl)
        # Check if the table is a top-level root table that is not
        # referenced and explicitly delete.
        resource_table = resource.next.table
        if resource_table not in schema.reference_map.values():
            if schema.ovs_tables[resource_table].is_root:
                row.delete()
    result = txn.commit()
    return OvsdbTransactionResult(result)
def put_resource(data, resource, schema, txn, idl):
    """
    Replace the row targeted by resource with the verified PUT data
    within transaction txn.

    Returns an OvsdbTransactionResult with the commit result. Raises
    MethodNotAllowed or DataValidationFailed on invalid requests.
    """
    # Allow PUT operation on System table
    if resource is None:
        raise MethodNotAllowed
    # We want to modify System table
    if resource.next is None:
        resource_update = resource
    else:
        # Walk to the last resource pair in the chain
        while True:
            if resource.next.next is None:
                break
            resource = resource.next
        resource_update = resource.next
    app_log.debug("Resource = Table: %s Relation: %s Column: %s" %
                  (resource.table, resource.relation, resource.column))
    utils.update_resource_keys(resource_update, schema, idl)
    if resource_update is None or resource_update.row is None or \
            verify.verify_http_method(resource, schema,
                                      REQUEST_TYPE_UPDATE) is False:
        raise MethodNotAllowed
    # verify data
    try:
        verified_data = verify.verify_data(data, resource, schema, idl,
                                           REQUEST_TYPE_UPDATE)
    except DataValidationFailed as e:
        app_log.debug(e)
        raise e
    # We want to modify System table
    if resource.next is None:
        updated_row = utils.update_row(resource_update,
                                       verified_data,
                                       schema, txn, idl)
    elif resource.relation == OVSDB_SCHEMA_CHILD:
        '''
        Updating row from a child table
        Example:
        /system/bridges: PUT is allowed when modifying the bridge child table
        '''
        # update row, populate it with data, add it as a reference to
        # the parent resource
        updated_row = utils.update_row(resource_update,
                                       verified_data,
                                       schema, txn, idl)
    elif resource.relation == OVSDB_SCHEMA_BACK_REFERENCE:
        '''
        In this case we only modify the data of the table, but we not modify
        the back reference.
        Example:
        /system/vrfs/vrf_default/bgp_routers: PUT allowed as we are
        modifying a back referenced resource
        '''
        # row for a back referenced item contains the parent's reference
        # in the verified data
        updated_row = utils.update_row(resource_update,
                                       verified_data,
                                       schema, txn, idl)
    elif resource.relation == OVSDB_SCHEMA_TOP_LEVEL:
        '''
        Updating row when we have a relationship with a top_level table
        Is not allowed to update the references in other tables.
        Example:
        /system/ports: PUT allowed as we are modifying Port to top level table
        '''
        updated_row = utils.update_row(resource_update,
                                       verified_data,
                                       schema, txn, idl)
    # Custom (plugin) validators run before committing the transaction
    try:
        utils.exec_validators_with_resource(idl, schema, resource,
                                            REQUEST_TYPE_UPDATE)
    except ValidationError as e:
        app_log.debug("Custom validations failed:")
        app_log.debug(e.error)
        raise DataValidationFailed(e.error)
    result = txn.commit()
    return OvsdbTransactionResult(result)
def refill_removed_columns(patch, data, resource, schema):
    '''
    For PATCH_OP_REMOVE and PATCH_OP_MOVE operations, applying the patch
    actually removes keys from the JSON that gets sent to IDL, which
    accomplishes nothing when removing/moving an entire column. In order
    to clear a column's value, it is needed to write an empty list or
    dict in the column and send this to IDL. For IDL, an optional column
    is that with min set to 0, which either makes it a list (even if just
    of max 1) or a dict if a key value pair is defined. Finally, this has
    to be done after validating the data, as an optional value with an
    enum defined will fail validation in verify_config_data, as the empty
    list/dict is not an accepted value.

    Returns `data` with emptied columns re-added (patched in place).
    Raises DataValidationFailed if the refill patch cannot be built.
    '''
    patch_list = patch.patch

    # Configurable columns: plain config keys plus references whose
    # category is config.
    config_keys = deepcopy(schema.ovs_tables[resource.table].config)
    references = schema.ovs_tables[resource.table].references
    for key in references:
        if references[key].category == OVSDB_SCHEMA_CONFIG:
            config_keys.update({key: references[key]})

    patch_fix_list = []
    for patch_op in patch_list:
        if patch_op[PATCH_KEY_OP] not in (PATCH_OP_REMOVE, PATCH_OP_MOVE):
            continue

        # Paths are validated previously when creating the patch,
        # so it's guaranteed that the resource's target column is
        # the first element in a path like "/a/b/c", in this case "a"
        if patch_op[PATCH_KEY_OP] == PATCH_OP_REMOVE:
            column = patch_op[PATCH_KEY_PATH].split("/")[1]
        else:
            column = patch_op[PATCH_KEY_FROM].split("/")[1]

        # The "empty" value IDL expects for an optional column.
        default_value = {} if config_keys[column].is_dict else []

        # If the PATCH_OP_MOVE or PATCH_OP_REMOVE operation was performed
        # on an entire column, the column is no longer in the data, so it
        # is added back with the default value via a PATCH_OP_ADD.
        if column not in data:
            patch_fix_list.append({PATCH_KEY_OP: PATCH_OP_ADD,
                                   PATCH_KEY_PATH: "/%s" % column,
                                   PATCH_KEY_VALUE: default_value})

    try:
        patch_fix = jsonpatch.JsonPatch(patch_fix_list)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed.
        raise DataValidationFailed("Malformed final JSON patch")

    # If there's at least one operation, apply the patch
    if patch_fix:
        app_log.debug("Refill empty columns patch %s" %
                      patch_fix.to_string())
        return patch_fix.apply(data, in_place=True)
    return data
def patch_resource(data, resource, schema, txn, idl, uri):
    """
    Apply a JSON PATCH (RFC 6902) to the row addressed by the resource
    URI chain, as a Tornado generator coroutine.

    Parameters:
        data: the JSON patch document from the request body
        resource: head of the parsed URI resource chain
        schema: extended OVSDB schema
        txn: OVSDB transaction the update is staged in
        idl: OVSDB IDL instance
        uri: request URI, used to fetch the row's current JSON

    Yields to fetch the current row; result is delivered via
    gen.Return(OvsdbTransactionResult). Raises MethodNotAllowed or
    DataValidationFailed on verification failure.
    """
    # Allow PATCH operation on System table
    if resource is None:
        raise MethodNotAllowed
    app_log.debug("Resource = Table: %s Relation: %s Column: %s" %
                  (resource.table, resource.relation, resource.column))
    # We want to modify System table
    if resource.next is None:
        resource_update = resource
    else:
        # Walk the chain so resource_update is its last link (the row
        # actually patched) and resource is its parent.
        while True:
            if resource.next.next is None:
                break
            resource = resource.next
        resource_update = resource.next
    utils.update_resource_keys(resource_update, schema, idl)
    if resource_update is None or resource_update.row is None or \
            verify.verify_http_method(resource, schema,
                                      REQUEST_TYPE_PATCH) is False:
        app_log.debug("Failed http_method verification")
        raise MethodNotAllowed
    needs_update = False
    # Create and verify patch
    (patch, needs_update) = create_patch(data)
    # Get the JSON to patch
    row_json = yield get_current_row(resource_update, uri, schema, idl)
    # Now apply the patch to that JSON
    patched_row_json = apply_patch(patch, row_json, resource_update, schema)
    # If at least one PATCH operation changed the row,
    # since a valid patch can contain just a PATCH_OP_TEST,
    # validate the patched row and update row with IDL
    if needs_update:
        # Validate and prepare final JSON to send to IDL
        new_row_json = prepare_data(patch, patched_row_json, resource,
                                    resource_update, schema, idl)
        app_log.debug("New row -> %s" % new_row_json)
        # Update resource with the patched JSON
        # System: resource.next is None
        # All other rows: resource.relation is not None
        if resource.next is None or resource.relation is not None:
            app_log.debug("Updating row...")
            # updated_row is not used for now but eventually will be returned
            updated_row = utils.update_row(resource_update, new_row_json,
                                           schema, txn, idl)
    try:
        utils.exec_validators_with_resource(idl, schema, resource,
                                            REQUEST_TYPE_PATCH)
    except ValidationError as e:
        app_log.debug("Custom validations failed:")
        app_log.debug(e.error)
        raise DataValidationFailed(e.error)
    result = txn.commit()
    # Coroutine-style return (Python 2 / Tornado): deliver the result to
    # the caller of this generator.
    raise gen.Return(OvsdbTransactionResult(result))
def verify_post_data(data, resource, schema, idl):
    """
    Validate the body of a POST (create) request and return the verified
    data, ready to be written through IDL.

    Parameters:
        data: request body; must contain OVSDB_SCHEMA_CONFIG, and
              OVSDB_SCHEMA_REFERENCED_BY for non-root top-level tables
        resource: parent link of the URI resource chain (resource.next
                  addresses the table the new row is created in)
        schema: extended OVSDB schema
        idl: OVSDB IDL instance

    Returns a dict of verified column values (plus parent reference for
    back-referenced tables). Raises DataValidationFailed on any missing
    or invalid data.
    """
    if OVSDB_SCHEMA_CONFIG not in data:
        raise DataValidationFailed("Missing %s data" % OVSDB_SCHEMA_CONFIG)

    _data = data[OVSDB_SCHEMA_CONFIG]

    # verify config and reference columns data
    verified_data = {}

    # When adding a child with kv_type of forward referencing, the
    # configuration data must contain the 'keyname' used to identify the
    # reference of the new resource created.
    # Fixed: compare relation with ==, not identity (`is` on strings only
    # works via interning).
    if resource.relation == OVSDB_SCHEMA_CHILD:
        reference = resource.keys[OVSDB_SCHEMA_REFERENCE][resource.column]
        if reference.kv_type:
            keyname = reference.keyname
            if keyname not in _data:
                error = "Missing keyname attribute to" +\
                        " reference the new resource" +\
                        " from the parent"
                raise DataValidationFailed(error)
            verified_data[keyname] = _data[keyname]
            _data.pop(keyname)

    # verify configuration data, add it to verified data
    verified_config_data = verify_config_data(resource.next, _data,
                                              schema, REQUEST_TYPE_CREATE)
    verified_data.update(verified_config_data)

    # verify reference data, add it to verified data
    verified_reference_data = verify_forward_reference(
        _data, resource.next, schema, idl)
    verified_data.update(verified_reference_data)

    # a non-root top-level table must be referenced by another resource
    # or ovsdb-server will garbage-collect it
    is_root = schema.ovs_tables[resource.next.table].is_root
    if resource.relation == OVSDB_SCHEMA_TOP_LEVEL and not is_root:
        if OVSDB_SCHEMA_REFERENCED_BY not in data:
            error = "Missing %s" % OVSDB_SCHEMA_REFERENCED_BY
            raise DataValidationFailed(error)
        _data = data[OVSDB_SCHEMA_REFERENCED_BY]
        verified_referenced_by_data = verify_referenced_by(
            _data, resource.next, schema, idl)
        verified_data.update(verified_referenced_by_data)
    elif resource.relation == OVSDB_SCHEMA_BACK_REFERENCE:
        # A back-referenced row carries its parent's row object in the
        # verified data under the parent-reference column.
        references = resource.next.keys[OVSDB_SCHEMA_REFERENCE]
        for key, value in references.iteritems():
            if value.relation == 'parent':
                parent_row = idl.tables[resource.table].rows[resource.row]
                verified_data.update({key: parent_row})

    # data verified
    return verified_data