def validate(self, context, data_dict, schema, action):
    """
    Validate and convert for package_create, package_update and
    package_show actions.
    """
    _, action_type = action.split('_')
    dataset_type = data_dict.get('type')
    if not dataset_type or dataset_type not in self._schemas:
        return data_dict, {
            'type': ["Unsupported dataset type: {t}".format(t=dataset_type)]
        }

    scheming_schema = self._expanded_schemas[dataset_type]

    if action_type == 'show':
        make_field_validators = _field_output_validators
    elif action_type == 'create':
        make_field_validators = _field_create_validators
    else:
        make_field_validators = _field_validators

    # Dataset-level fields: the third argument reflects whether the field
    # was absent from the incoming schema before we added it.
    for field in scheming_schema['dataset_fields']:
        name = field['field_name']
        schema[name] = make_field_validators(
            field, scheming_schema, name not in schema)

    # Resource-level fields always pass False for that flag.
    destination = schema['resources']
    for field in scheming_schema.get('resource_fields', []):
        destination[field['field_name']] = make_field_validators(
            field, scheming_schema, False)

    return navl_validate(data_dict, schema, context)
def validate(self, context, data_dict, schema, action):
    """
    Validate and convert for package_create, package_update and
    package_show actions.
    """
    _, action_type = action.split('_')
    dataset_type = data_dict.get('type')
    if not dataset_type or dataset_type not in self._schemas:
        return data_dict, {'type': [
            "Unsupported dataset type: {t}".format(t=dataset_type)]}

    scheming_schema = self._expanded_schemas[dataset_type]

    # Dispatch on the action suffix; any action that is not show/create
    # (i.e. update) falls back to the plain field validators.
    build_validators = {
        'show': _field_output_validators,
        'create': _field_create_validators,
    }.get(action_type, _field_validators)

    for field in scheming_schema['dataset_fields']:
        field_name = field['field_name']
        schema[field_name] = build_validators(
            field, scheming_schema, field_name not in schema)

    resources = schema['resources']
    for field in scheming_schema.get('resource_fields', []):
        resources[field['field_name']] = build_validators(
            field, scheming_schema, False)

    return navl_validate(data_dict, schema, context)
def validate(self, context, data_dict, schema, action):
    """
    Validate and convert for package_create, package_update and
    package_show actions.

    Also fills in scheming field defaults (rendered via
    ``helpers.scheming_render_from_string``) for fields that are missing
    from ``data_dict`` before validation runs.

    :returns: ``(data, errors)`` from ``navl_validate``, or
        ``(data_dict, errors)`` immediately when the dataset type is
        missing or not registered in ``self._schemas``
    """
    _, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:
        return data_dict, {
            'type': ["Unsupported dataset type: {t}".format(t=t)]
        }
    scheming_schema = self._expanded_schemas[t]

    # 'show' and 'create' select dedicated validator factories; every
    # other action (update) uses the plain field validators.
    # Fix: a dead 'upate' [sic] key previously mapped to the create
    # validators here -- it could never match a real action type and
    # has been removed.
    get_validators = {
        'show': _field_output_validators,
        'create': _field_create_validators,
    }.get(action_type, _field_validators)

    fg = (
        (scheming_schema['dataset_fields'], schema),
        (scheming_schema['resource_fields'], schema['resources'])
    )
    for field_list, destination in fg:
        for f in field_list:
            destination[f['field_name']] = get_validators(
                f, scheming_schema, f['field_name'] not in schema
            )
            # Apply default field values before going through validation.
            # This deals with fields that have form_snippet set to null,
            # and fields that have defaults added after initial creation.
            # NOTE(review): resource-field defaults are also written into
            # the top-level data_dict here rather than into each resource
            # dict -- confirm this is intended.
            if data_dict.get(f['field_name']) is None:
                default = f.get('default')
                if default:
                    data_dict[f['field_name']] = (
                        helpers.scheming_render_from_string(
                            source=default
                        )
                    )
    return navl_validate(data_dict, schema, context)
def _validate_sub_data(sub_data, schema, context): validated_list = [] errors_list = [] for chunk in sub_data: for k, v in schema.items(): if not isinstance(v, list) or not isinstance(chunk.get(k), list): continue if chunk[k]: chunk[k] = chunk[k][0] validated_data, err = navl_validate(chunk, schema, context) validated_list.append(validated_data) errors_list.append(err) return validated_list, errors_list
def validate(self, context, data_dict, schema, action):
    """
    Validate and convert for group/organization create, update and show
    actions.

    :param action: action name, e.g. ``group_show``; the prefix names
        the thing being validated and the suffix selects the validators
    :returns: ``(data, errors)`` from ``navl_validate``, or
        ``(data_dict, errors)`` immediately when the type is missing or
        not registered in ``self._schemas``
    """
    thing, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:
        # Fix: values in a navl error dict are lists of messages; the
        # bare string used here was inconsistent with the dataset
        # validate() implementations in this file.
        return data_dict, {
            'type': ["Unsupported {thing} type: {t}".format(thing=thing, t=t)]
        }
    scheming_schema = self._expanded_schemas[t]
    scheming_fields = scheming_schema['fields']
    get_validators = (
        _field_output_validators_group
        if action_type == 'show'
        else _field_validators
    )
    for f in scheming_fields:
        schema[f['field_name']] = get_validators(
            f, scheming_schema, f['field_name'] not in schema)
    return navl_validate(data_dict, schema, context)
def validate(self, context, data_dict, schema, action):
    """
    Apply the scheming field validators for *action*, then run navl
    validation over *data_dict*.
    """
    thing, action_type = action.split('_')
    group_type = data_dict.get('type')
    if not group_type or group_type not in self._schemas:
        message = "Unsupported {thing} type: {t}".format(
            thing=thing, t=group_type)
        return data_dict, {'type': message}

    scheming_schema = self._expanded_schemas[group_type]

    if action_type == 'show':
        validator_factory = _field_output_validators_group
    else:
        validator_factory = _field_validators

    for field in scheming_schema['fields']:
        key = field['field_name']
        validator_factory_args = (field, scheming_schema, key not in schema)
        schema[key] = validator_factory(*validator_factory_args)

    return navl_validate(data_dict, schema, context)
def validate(self, context, data_dict, schema, action):
    """
    Validate and convert for package_create, package_update and
    package_show actions.
    """
    thing, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:
        return data_dict, {
            'type': ["Unsupported dataset type: {t}".format(t=t)]
        }
    scheming_schema = self._expanded_schemas[t]
    # Optional schema-level validator strings run before/after the
    # per-field validators; both are disabled for the 'show' action.
    before = scheming_schema.get('before_validators')
    after = scheming_schema.get('after_validators')
    if action_type == 'show':
        get_validators = _field_output_validators
        before = after = None
    elif action_type == 'create':
        get_validators = _field_create_validators
    else:
        get_validators = _field_validators
    if before:
        schema['__before'] = validation.validators_from_string(
            before, None, scheming_schema)
    if after:
        schema['__after'] = validation.validators_from_string(
            after, None, scheming_schema)
    # (field list, destination schema, whether the extras-conversion flag
    # may apply) -- resource fields never use it.
    fg = ((scheming_schema['dataset_fields'], schema, True),
          (scheming_schema['resource_fields'], schema['resources'], False))
    # Dataset fields with repeating_subfields that are not already in the
    # core schema get round-tripped through extras as JSON (see below).
    composite_convert_fields = []
    for field_list, destination, convert_extras in fg:
        for f in field_list:
            convert_this = convert_extras and f['field_name'] not in schema
            destination[f['field_name']] = get_validators(
                f, scheming_schema, convert_this)
            if convert_this and 'repeating_subfields' in f:
                composite_convert_fields.append(f['field_name'])

    def composite_convert_to(key, data, errors, context):
        # Closure over composite_convert_fields: serialize each composite
        # value to JSON and store it via convert_to_extras; ``missing``
        # markers become None in the JSON output.
        unflat = unflatten(data)
        for f in composite_convert_fields:
            if f not in unflat:
                continue
            data[(f, )] = json.dumps(
                unflat[f],
                default=lambda x: None if x == missing else x)
            convert_to_extras((f, ), data, errors, context)
            del data[(f, )]

    if action_type == 'show':
        if composite_convert_fields:
            # Rehydrate composite fields from their JSON extras, then drop
            # those extras from the output dict.
            for ex in data_dict['extras']:
                if ex['key'] in composite_convert_fields:
                    data_dict[ex['key']] = json.loads(ex['value'])
            data_dict['extras'] = [
                ex for ex in data_dict['extras']
                if ex['key'] not in composite_convert_fields
            ]
    else:
        # Expand flattened form input back into nested structures for any
        # composite dataset/resource fields before validating.
        dataset_composite = {
            f['field_name']
            for f in scheming_schema['dataset_fields']
            if 'repeating_subfields' in f
        }
        if dataset_composite:
            expand_form_composite(data_dict, dataset_composite)
        resource_composite = {
            f['field_name']
            for f in scheming_schema['resource_fields']
            if 'repeating_subfields' in f
        }
        if resource_composite and 'resources' in data_dict:
            for res in data_dict['resources']:
                expand_form_composite(res, resource_composite)
        # convert composite package fields to extras so they are stored
        if composite_convert_fields:
            # Append to (not replace) any '__after' validators set above;
            # uses a copy of schema so the caller's dict is not mutated.
            schema = dict(
                schema,
                __after=schema.get('__after', []) + [composite_convert_to])
    return navl_validate(data_dict, schema, context)