def inventory_entry_bulk_create(context, data_dict):
    model = context['model']
    schema = context['schema']
    session = context['session']
    organization = model.Group.get(context['organization_name'])
    inventory_entry_dict = {'group_id': organization.id}

    if not data_dict.get('field-name-input-0'):
        raise ValidationError(
            {'error': [_('Please add at least one inventory entry.')]})

    for inventory_entry_id in range(10):
        # Use .get() so a missing indexed field ends the loop instead of
        # raising KeyError.
        inventory_entry_name = data_dict.get(
            'field-name-input-' + str(inventory_entry_id))
        if not inventory_entry_name:
            break
        inventory_entry_dict['title'] = inventory_entry_name
        inventory_entry_dict['recurring_interval'] = data_dict[
            'field-recurring-input-' + str(inventory_entry_id)]
        inventory_entry_dict['is_recurring'] = (
            inventory_entry_dict['recurring_interval'] != '0')
        data, errors = navl_validate(inventory_entry_dict, schema, context)
        if errors:
            session.rollback()
            raise ValidationError({'error': [
                _('Please check entry number {0}.').format(
                    inventory_entry_id + 1)]})
        obj = table_dict_save(inventory_entry_dict, InventoryEntry, context)

    model.repo.commit()
    return table_dictize(obj, context)

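# A hedged sketch (values illustrative) of the form payload the bulk-create
# action above expects: up to ten 'field-name-input-N'/'field-recurring-input-N'
# pairs, with the first empty or missing name terminating the loop.
example_data_dict = {
    'field-name-input-0': 'Blankets',
    'field-recurring-input-0': '7',   # recurs every 7 days
    'field-name-input-1': 'Bottled water',
    'field-recurring-input-1': '0',   # '0' marks a non-recurring entry
}
# last_entry = inventory_entry_bulk_create(context, example_data_dict)
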
def validate(self, context, data_dict, schema, action):
    """
    Validate and convert for package_create, package_update and
    package_show actions.
    """
    thing, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:
        return data_dict, {'type': [
            "Unsupported dataset type: {t}".format(t=t)]}

    scheming_schema = self._expanded_schemas[t]

    if action_type == 'show':
        get_validators = _field_output_validators
    elif action_type == 'create':
        get_validators = _field_create_validators
    else:
        get_validators = _field_validators

    for f in scheming_schema['dataset_fields']:
        schema[f['field_name']] = get_validators(
            f, scheming_schema, f['field_name'] not in schema)

    resource_schema = schema['resources']
    for f in scheming_schema['resource_fields']:
        resource_schema[f['field_name']] = get_validators(
            f, scheming_schema, False)

    return navl_validate(data_dict, schema, context)

def metadata_standard_create(context, data_dict):
    """
    Create a new metadata standard.

    You must be authorized to create metadata standards.

    :param name: the name of the new metadata standard (optional -
        auto-generated if not supplied); must conform to standard naming rules
    :type name: string
    :param description: the description of the metadata standard (optional)
    :type description: string
    :param standard_name: the name of the metadata standard
    :type standard_name: string
    :param standard_version: the version of the metadata standard (nullable)
    :type standard_version: string
    :param parent_standard_id: the id or name of the metadata standard from
        which this standard is derived (nullable)
    :type parent_standard_id: string
    :param metadata_template_json: a complete example of a JSON metadata
        dictionary that conforms to this standard; may be used for
        initializing a search index
    :type metadata_template_json: string
    :param deserialize_json: convert JSON string fields to objects in the
        output dict (optional, default: ``False``)
    :type deserialize_json: boolean

    :returns: the newly created metadata standard (unless 'return_id_only' is
        set to True in the context, in which case just the metadata standard
        id will be returned)
    :rtype: dictionary
    """
    log.info("Creating metadata standard: %r", data_dict)
    tk.check_access('metadata_standard_create', context, data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)
    deserialize_json = asbool(data_dict.get('deserialize_json'))

    data, errors = tk.navl_validate(
        data_dict, schema.metadata_standard_create_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    metadata_standard = model_save.metadata_standard_dict_save(data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(
            u'REST API: Create metadata standard %s') % metadata_standard.id

    if not defer_commit:
        model.repo.commit()

    output = metadata_standard.id if return_id_only \
        else tk.get_action('metadata_standard_show')(
            context, {'id': metadata_standard.id,
                      'deserialize_json': deserialize_json})
    return output

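# A hedged usage sketch: invoking the action above through CKAN's action API.
# get_action fills in 'model'/'session' in the context; the user name and
# field values here are illustrative only.
standard_id = tk.get_action('metadata_standard_create')(
    {'user': 'admin', 'return_id_only': True},
    {'standard_name': 'DataCite', 'standard_version': '4.3'})
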
def role_show(context, data_dict):
    """
    Return a role definition.

    You must be a sysadmin to view roles.

    :param id: the id of the role
    :type id: string

    :rtype: dictionary
    """
    log.debug("Retrieving role: %r", data_dict)

    role_id = tk.get_or_bust(data_dict, 'id')
    role = extmodel.Role.get(role_id)
    if role is not None:
        role_id = role.id
    else:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('Role')))

    tk.check_access('role_show', context, data_dict)

    context['role'] = role
    role_dict = dictization.role_dictize(role, context)
    result_dict, errors = tk.navl_validate(
        role_dict, schema.role_show_schema(), context)
    return result_dict

def issues_for_dataset(dataset_id, get_query_dict):
    query, errors = toolkit.navl_validate(
        dict(get_query_dict), schema.issue_dataset_controller_schema())
    if errors:
        raise toolkit.ValidationError(errors)
    query.pop('__extras', None)
    return _search_issues(dataset_id=dataset_id, **query)

def validate(self):
    '''Validate self.params against the schema from the context (falling
    back to self.schema), raising ValidationError on failure.'''
    schema = self.context.get('schema', self.schema)
    self.params, errors = toolkit.navl_validate(
        self.params, schema, self.context)
    if errors:
        raise toolkit.ValidationError(errors)

def all_issues(get_query_dict):
    query, errors = toolkit.navl_validate(
        dict(get_query_dict), schema.issue_dataset_controller_schema())
    if errors:
        raise toolkit.ValidationError(errors)
    query.pop('__extras', None)
    return _search_issues(include_datasets=True, **query)

def jsonpatch_show(context, data_dict):
    """
    Return a JSON Patch definition.

    The structure of the returned dictionary may be customized by passing
    'schema' in the context.

    :param id: the id of the JSON Patch
    :type id: string

    :rtype: dictionary
    """
    log.debug("Retrieving JSON Patch: %r", data_dict)

    jsonpatch_id = tk.get_or_bust(data_dict, 'id')
    jsonpatch = JSONPatch.get(jsonpatch_id)
    if jsonpatch is not None:
        jsonpatch_id = jsonpatch.id
    else:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('JSON Patch')))

    tk.check_access('jsonpatch_show', context, data_dict)

    output_schema = context.get('schema')
    context['jsonpatch'] = jsonpatch
    jsonpatch_dict = jsonpatch_dictize(jsonpatch, context)
    result_dict, errors = tk.navl_validate(
        jsonpatch_dict, output_schema or schema.jsonpatch_show_schema(),
        context)
    return result_dict

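# Hedged sketch: reshaping the output dict by supplying a custom show schema
# in the context, as the docstring above allows. my_minimal_jsonpatch_show_schema
# is a hypothetical schema factory, not part of the extension.
patch_dict = tk.get_action('jsonpatch_show')(
    {'schema': my_minimal_jsonpatch_show_schema()},
    {'id': 'some-jsonpatch-id'})
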
def validate(self, context, data_dict, schema, action):
    '''Use a different validation schema if the dataset is private.'''
    is_requestdata_type = self._is_requestdata_type(data_dict)
    if action in ['package_create', 'package_update']:
        # Treat anything other than an explicit 'false' as private.
        private = str(data_dict.get('private', '')).lower() != 'false'
        if private:
            self._update_with_private_modify_package_schema(schema)
        if is_requestdata_type:
            self._update_with_requestdata_modify_package_schema(schema)
        fields_to_skip = config.get(
            'hdx.validation.allow_skip_for_sysadmin', '').split(',')
        if len(fields_to_skip) > 0 and fields_to_skip[0] and \
                authz.is_sysadmin(c.user) and \
                context.get(hdx_update.SKIP_VALIDATION):
            self._update_with_skip_validation(schema, fields_to_skip)
    if action == 'package_show':
        if is_requestdata_type:
            self._update_with_requestdata_show_package_schema(schema)
    return toolkit.navl_validate(data_dict, schema, context)

def validate(self, context, data_dict, schema, action):
    """
    Validate and convert for package_create, package_update and
    package_show actions.
    """
    thing, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:
        return data_dict, {
            'type': ["Unsupported dataset type: {t}".format(t=t)]
        }

    scheming_schema = self._expanded_schemas[t]

    if action_type == 'show':
        get_validators = _field_output_validators
    elif action_type == 'create':
        get_validators = _field_create_validators
    else:
        get_validators = _field_validators

    for f in scheming_schema['dataset_fields']:
        schema[f['field_name']] = get_validators(
            f, scheming_schema, f['field_name'] not in schema)

    resource_schema = schema['resources']
    for f in scheming_schema.get('resource_fields', []):
        resource_schema[f['field_name']] = get_validators(
            f, scheming_schema, False)

    return navl_validate(data_dict, schema, context)

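# Illustrative (abridged) shape of the expanded scheming schema the loops
# above walk. Real schemas come from the scheming extension's YAML/JSON
# definitions, so the field names here are assumptions.
scheming_schema_example = {
    'dataset_fields': [
        {'field_name': 'title'},
        {'field_name': 'language', 'validators': 'ignore_missing'},
    ],
    'resource_fields': [
        {'field_name': 'url'},
    ],
}
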
def validate(self, context, data_dict, schema, action):
    thing, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:  # pragma: no cover
        return data_dict, {'type': "Unsupported {thing} type: {t}".format(
            thing=thing, t=t)}

    scheming_schema = self._schemas[t]
    scheming_fields = scheming_schema['fields']
    for f in scheming_fields:
        if action_type == 'show':
            if f['field_name'] not in schema:
                validators = [convert_from_extras, ignore_missing]
            else:
                validators = [ignore_missing]
            if 'output_validators' in f:
                validators += validators_from_string(f['output_validators'])
        else:
            if 'validators' in f:
                validators = validators_from_string(f['validators'])
            else:
                validators = [ignore_missing, unicode]
            if f['field_name'] not in schema:
                validators = validators + [convert_to_extras]
        schema[f['field_name']] = validators

    return navl_validate(data_dict, schema, context)

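# Illustrative scheming field entries as consumed by the loop above:
# 'validators'/'output_validators' hold space-separated validator names that
# validators_from_string() resolves to functions. Field names and validator
# strings are assumptions, not the full scheming spec.
scheming_fields_example = [
    {'field_name': 'language', 'validators': 'ignore_missing unicode'},
    {'field_name': 'doi', 'output_validators': 'ignore_missing'},
]
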
def _validate_show(issue_id, dataset_id, session,
                   schema=schema.issue_show_controller_schema()):
    query, errors = toolkit.navl_validate(
        data={'id': issue_id, 'dataset_id': dataset_id},
        schema=schema,
        context={'session': session, 'model': cmodel})
    if errors:
        raise toolkit.ValidationError(errors)
    return issue_id

def _validate_show(issue_number, dataset_id, session,
                   schema=schema.issue_show_controller_schema()):
    query, errors = toolkit.navl_validate(
        data={'issue_number': issue_number, 'dataset_id': dataset_id},
        schema=schema,
        context={'session': session, 'model': cmodel})
    if errors:
        raise toolkit.ValidationError(errors)
    return query

def jsonpatch_create(context, data_dict):
    """
    Create a new JSON Patch for some object. This consists of a single patch
    "operation" as per `Section 4`_ of `RFC6902`_.

    The structure of the returned dictionary may be customized by passing
    'schema' in the context.

    :param model_name: this is the 'xyz' part of an 'xyz_show' action to
        which the patch will be applied
    :type model_name: string
    :param object_id: the id or name of the 'xyz' object
    :type object_id: string
    :param operation: the JSON Patch operation, e.g.
        { "op": "add", "path": "/a/b/c", "value": "foo" }
    :type operation: dictionary
    :param scope: may be used to filter the patches that get applied under
        different scenarios (optional)
    :type scope: string
    :param ordinal: set the order of the patch within the list of patches to
        be applied (optional: default ``0``); patches with equivalent ordinal
        values will be applied in timestamp (creation) order
    :type ordinal: integer
    :param data: any additional information about the patch (optional)
    :type data: dictionary

    :returns: the newly created JSON Patch (unless 'return_id_only' is set to
        True in the context, in which case just the JSON Patch id will be
        returned)
    :rtype: dictionary

    .. _`RFC6902`: https://tools.ietf.org/html/rfc6902
    .. _`Section 4`: https://tools.ietf.org/html/rfc6902#section-4
    """
    log.info("Creating JSON Patch: %r", data_dict)
    tk.check_access('jsonpatch_create', context, data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)

    data, errors = tk.navl_validate(
        data_dict, schema.jsonpatch_create_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    jsonpatch = jsonpatch_dict_save(data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(u'REST API: Create JSON Patch %s') % jsonpatch.id

    if not defer_commit:
        model.repo.commit()

    output = jsonpatch.id if return_id_only \
        else tk.get_action('jsonpatch_show')(context, {'id': jsonpatch.id})
    return output

def validate(self, context, data_dict, schema, action):
    # When triggered through harvesters, the default schema is taken (see the
    # usage of default_create_package_schema in the harvesters); override
    # this method to enforce our schema.
    if action == 'package_create':
        schema = self.create_package_schema(
            isHarvesting=(not context.get('save')))
    elif action == 'package_update':
        schema = self.update_package_schema(
            isHarvesting=(not context.get('save')))
    return toolkit.navl_validate(data_dict, schema, context)

def permission_undefine(context, data_dict):
    """
    Delete the relations that define a permission for some action(s).

    This might be used, for example, if an action previously associated with
    a permission has been deprecated.

    Calls to this function should normally be scripted, or coded in an
    extension, rather than being made available in the UI.

    :param content_type: conceptual object type
    :type content_type: string
    :param operation: conceptual action
    :type operation: string
    :param actions: names of action functions to be dissociated from the
        given content type and operation
    :type actions: list of strings

    :returns: the updated permission with its remaining associated actions
    :rtype: dictionary
    """
    log.info("Undefining permission: %r", data_dict)
    tk.check_access('permission_undefine', context, data_dict)

    model = context['model']
    session = context['session']
    defer_commit = context.get('defer_commit', False)

    data, errors = tk.navl_validate(
        data_dict, schema.permission_undefine_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    # find permission
    permission = session.query(extmodel.Permission) \
        .filter_by(content_type=data['content_type'],
                   operation=data['operation']) \
        .first()
    if permission is None:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('Permission')))

    # remove permission actions
    permission_actions = session.query(extmodel.PermissionAction) \
        .filter_by(permission_id=permission.id) \
        .filter(extmodel.PermissionAction.action_name.in_(data['actions'])) \
        .all()
    for permission_action in permission_actions:
        permission_action.delete()

    if not defer_commit:
        model.repo.commit()

    context['include_actions'] = True
    return dictization.permission_dictize(permission, context)

def plugin_validate(plugin, context, data_dict, schema, action):
    """
    Backwards compatibility with 2.x dataset group and org plugins:
    return a default validate method if one has not been provided.
    """
    if hasattr(plugin, 'validate'):
        result = plugin.validate(context, data_dict, schema, action)
        if result is not None:
            return result
    return toolkit.navl_validate(data_dict, schema, context)

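# Minimal sketch of a 2.x-era plugin this shim accommodates: a validate()
# that handles only some actions and returns None otherwise, falling through
# to the default navl_validate call above. The class and its logic are
# illustrative, not a real plugin.
class LegacyDatasetForm(object):
    def validate(self, context, data_dict, schema, action):
        if action == 'package_show':
            return data_dict, {}  # skip validation on show
        return None  # defer to the default behaviour
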
def validate(self, context, data_dict, schema, action):
    if action in ('package_update', 'package_create'):
        # If the caller to package_update specified a schema (e.g.
        # harvesters specify the default schema) then we don't want to
        # override that.
        if not context.get('schema'):
            schema = self.form_to_db_schema_options(context)
            if 'api_version' in context:
                # Tag validation is looser than CKAN default
                schema['tags'] = tags_schema()
    return tk.navl_validate(data_dict, schema, context)

def validate(self, context, data_dict, schema, action):
    if action in ('package_update', 'package_create'):
        # If the caller to package_update specified a schema (e.g.
        # harvesters specify the default schema) then we don't want to
        # override that.
        if not context.get('schema'):
            schema = self.form_to_db_schema_options(context)
            if 'api_version' in context:
                # Tag validation is looser than CKAN default
                schema['tags'] = tags_schema()
    return toolkit.navl_validate(data_dict, schema, context)

def issues_for_org(org_id, get_query_dict):
    query, errors = toolkit.navl_validate(
        dict(get_query_dict), schema.issue_dataset_controller_schema())
    if errors:
        raise toolkit.ValidationError(errors)
    query.pop('__extras', None)
    template_params = _search_issues(organization_id=org_id,
                                     include_datasets=True, **query)
    template_params['org'] = \
        logic.get_action('organization_show')({}, {'id': org_id})
    return template_params

def metadata_json_attr_map_create(context, data_dict):
    """
    Create a one-to-one mapping from a metadata JSON element to a metadata
    record attribute. When a metadata record is created or updated, metadata
    JSON values are copied into metadata record attributes for each such
    defined mapping.

    :param json_path: JSON pointer to a location in a metadata record
        dictionary
    :type json_path: string
    :param record_attr: the name of an attribute in the metadata record schema
    :type record_attr: string
    :param is_key: no longer used
    :type is_key: boolean
    :param metadata_standard_id: the id or name of the metadata standard for
        which this mapping is defined
    :type metadata_standard_id: string

    :returns: the newly created MetadataJSONAttrMap object
    :rtype: dictionary
    """
    log.info("Creating metadata JSON attribute mapping: %r", data_dict)
    tk.check_access('metadata_json_attr_map_create', context, data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)

    data_dict['is_key'] = False
    data, errors = tk.navl_validate(
        data_dict, schema.metadata_json_attr_map_create_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    metadata_json_attr_map = model_save.metadata_json_attr_map_dict_save(
        data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(u'REST API: Create metadata JSON attribute mapping %s'
                        ) % metadata_json_attr_map.id

    if not defer_commit:
        model.repo.commit()

    output = metadata_json_attr_map.id if return_id_only \
        else tk.get_action('metadata_json_attr_map_show')(
            context, {'id': metadata_json_attr_map.id})
    return output

def workflow_annotation_create(context, data_dict):
    """
    Create a new workflow annotation definition, which simply makes it easier
    for annotations to be added to metadata records via the UI.

    You must be authorized to create workflow annotations.

    :param name: the (augmented metadata record) dictionary key under which
        an annotation will be added
    :type name: string
    :param attributes: a dict of names and JSON types of the annotation
        attributes
    :type attributes: dictionary

    :returns: the newly created workflow annotation (unless 'return_id_only'
        is set to True in the context, in which case just the workflow
        annotation id will be returned)
    :rtype: dictionary
    """
    log.info("Creating workflow annotation: %r", data_dict)
    tk.check_access('workflow_annotation_create', context, data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)

    data, errors = tk.navl_validate(
        data_dict, schema.workflow_annotation_create_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    workflow_annotation = model_save.workflow_annotation_dict_save(
        data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(u'REST API: Create workflow annotation %s'
                        ) % workflow_annotation.id

    if not defer_commit:
        model.repo.commit()

    output = workflow_annotation.id if return_id_only \
        else tk.get_action('workflow_annotation_show')(
            context, {'id': workflow_annotation.id})
    return output

def workflow_transition_create(context, data_dict):
    """
    Create a new workflow transition.

    You must be authorized to create workflow transitions.

    :param from_state_id: the id or name of the source workflow state
        (nullable - null implies that the target state is an initial workflow
        state)
    :type from_state_id: string
    :param to_state_id: the id or name of the target workflow state
    :type to_state_id: string

    :returns: the newly created workflow transition (unless 'return_id_only'
        is set to True in the context, in which case just the workflow
        transition id will be returned)
    :rtype: dictionary
    """
    log.info("Creating workflow transition: %r", data_dict)
    tk.check_access('workflow_transition_create', context, data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)

    data, errors = tk.navl_validate(
        data_dict, schema.workflow_transition_create_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    workflow_transition = model_save.workflow_transition_dict_save(
        data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(u'REST API: Create workflow transition %s'
                        ) % workflow_transition.id

    if not defer_commit:
        model.repo.commit()

    output = workflow_transition.id if return_id_only \
        else tk.get_action('workflow_transition_show')(
            context, {'id': workflow_transition.id})
    return output

def activity_create(
        context: Context, data_dict: DataDict) -> Optional[dict[str, Any]]:
    """Create a new activity stream activity.

    You must be a sysadmin to create new activities.

    :param user_id: the name or id of the user who carried out the activity,
        e.g. ``'seanh'``
    :type user_id: string
    :param object_id: the name or id of the object of the activity, e.g.
        ``'my_dataset'``
    :type object_id: string
    :param activity_type: the type of the activity, this must be an activity
        type that CKAN knows how to render, e.g. ``'new package'``,
        ``'changed user'``, ``'deleted group'`` etc.
    :type activity_type: string
    :param data: any additional data about the activity
    :type data: dictionary

    :returns: the newly created activity
    :rtype: dictionary
    """
    tk.check_access("activity_create", context, data_dict)

    if not tk.config.get_value("ckan.activity_streams_enabled"):
        return

    model = context["model"]

    # Any revision_id that the caller attempts to pass in the activity_dict
    # is ignored and removed here.
    if "revision_id" in data_dict:
        del data_dict["revision_id"]

    sch = context.get("schema") or schema.default_create_activity_schema()

    data, errors = tk.navl_validate(data_dict, sch, context)
    if errors:
        raise tk.ValidationError(errors)

    activity = activity_dict_save(data, context)

    if not context.get("defer_commit"):
        model.repo.commit()

    log.debug("Created '%s' activity" % activity.activity_type)
    return model_activity.activity_dictize(activity, context)

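# A hedged example call mirroring the docstring above; a sysadmin context is
# assumed and the ids are illustrative.
activity = tk.get_action("activity_create")(
    {"user": "some_sysadmin"},
    {
        "user_id": "seanh",
        "object_id": "my_dataset",
        "activity_type": "new package",
        "data": {"package": {"name": "my_dataset"}},
    },
)
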
def validate(self, context, data_dict, schema, action):
    data, errors = toolkit.navl_validate(data_dict, schema, context)

    if action == 'organization_show':
        # Transform Apinf fields saved as extras into Organization object
        # fields.
        extras = ['url', 'contact_name', 'contact_email', 'contact_phone']
        effective_extras = []

        for extra in data_dict['extras']:
            if extra['key'] in extras:
                data[extra['key']] = extra['value']
            else:
                effective_extras.append(extra)

        data['extras'] = effective_extras

    return data, errors

def role_create(context, data_dict):
    """
    Create a new role.

    You must be a sysadmin to create roles.

    :param name: the name of the role; standard naming rules apply
    :type name: string
    :param title: the title of the role (optional)
    :type title: string
    :param description: a description of the role (optional)
    :type description: string

    :returns: the newly created role (unless 'return_id_only' is set to True
        in the context, in which case just the role id will be returned)
    :rtype: dictionary
    """
    log.info("Creating role: %r", data_dict)
    tk.check_access('role_create', context, data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)

    data, errors = tk.navl_validate(
        data_dict, schema.role_create_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    role = dictization.role_dict_save(data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(u'REST API: Create role %s') % role.id

    if not defer_commit:
        model.repo.commit()

    output = role.id if return_id_only \
        else tk.get_action('role_show')(context, {'id': role.id})
    return output

def validate(context, data_dict, default_schema):
    '''
    Validate the data_dict against a schema. If a schema is not available in
    the context (under the key 'schema') then the default schema is used.

    If the data_dict fails the validation process a ValidationError is
    raised, otherwise the potentially updated data_dict is returned.

    :param context: the ckan context dict
    :param data_dict: the dict to validate
    :param default_schema: the default schema to use if the context doesn't
        have one
    '''
    schema = context.get(u'schema', default_schema)
    data_dict, errors = toolkit.navl_validate(data_dict, schema, context)
    if errors:
        raise toolkit.ValidationError(errors)
    return data_dict

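# Usage sketch (hypothetical action and schema factory names): an action
# funnels its data_dict through the helper above, letting callers swap in
# their own schema via context['schema'].
def thing_create(context, data_dict):
    data_dict = validate(context, data_dict, default_thing_create_schema())
    # ... persist the validated dict here ...
    return data_dict
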
def validate(self, context, data_dict, schema, action):
    thing, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:
        return data_dict, {'type': "Unsupported {thing} type: {t}".format(
            thing=thing, t=t)}

    scheming_schema = self._expanded_schemas[t]
    scheming_fields = scheming_schema['fields']

    get_validators = (_field_output_validators
                      if action_type == 'show' else _field_validators)
    for f in scheming_fields:
        schema[f['field_name']] = get_validators(
            f, scheming_schema, f['field_name'] not in schema)

    return navl_validate(data_dict, schema, context)

def validate(self, context, data_dict, schema, action):
    thing, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:
        return data_dict, {
            'type': "Unsupported {thing} type: {t}".format(thing=thing, t=t)
        }

    scheming_schema = self._expanded_schemas[t]
    scheming_fields = scheming_schema['fields']

    get_validators = (_field_output_validators_group
                      if action_type == 'show' else _field_validators)
    for f in scheming_fields:
        schema[f['field_name']] = get_validators(
            f, scheming_schema, f['field_name'] not in schema)

    return navl_validate(data_dict, schema, context)

def validate(self, context, data_dict, schema, action):
    if action in ('package_update', 'package_create'):
        # If the caller to package_update specified a schema (e.g.
        # harvesters specify the default schema) then we don't want to
        # override that.
        if not context.get('schema'):
            if 'api_version' in context:
                # When accessed by the API, just use the default schemas.
                # It's only the forms that are customized to make it easier
                # for humans.
                if action == 'package_create':
                    schema = default_schema.default_create_package_schema()
                else:
                    schema = default_schema.default_update_package_schema()
            else:
                # Customized schema for DGU form
                schema = self.form_to_db_schema_options(context)
    return toolkit.navl_validate(data_dict, schema, context)

def validate(self, context, data_dict, schema, action):
    """
    Validate and convert for package_create, package_update and
    package_show actions.
    """
    thing, action_type = action.split('_')
    t = data_dict.get('type')
    if not t or t not in self._schemas:  # pragma: no cover
        return data_dict, {'type': [
            "Unsupported dataset type: {t}".format(t=t)]}

    scheming_schema = self._schemas[t]

    for f in scheming_schema['dataset_fields']:
        if action_type == 'show':
            if f['field_name'] not in schema:
                validators = [convert_from_extras, ignore_missing]
            else:
                validators = [ignore_missing]
            if 'output_validators' in f:
                validators += validators_from_string(f['output_validators'])
        else:
            if 'validators' in f:
                validators = validators_from_string(f['validators'])
            else:
                validators = [ignore_missing, unicode]
            if f['field_name'] not in schema:
                validators = validators + [convert_to_extras]
        schema[f['field_name']] = validators

    resource_schema = schema['resources']
    for f in scheming_schema['resource_fields']:
        if action_type == 'show':
            validators = [ignore_missing]
            if 'output_validators' in f:
                validators += validators_from_string(f['output_validators'])
        else:
            if 'validators' in f:
                validators = validators_from_string(f['validators'])
            else:
                validators = [ignore_missing, unicode]
        resource_schema[f['field_name']] = validators

    return navl_validate(data_dict, schema, context)

def inventory_entry_update(context, data_dict):
    # TODO @palcu: DRY this w/ inventory_entry_create
    model = context['model']
    schema = context['schema']
    session = context['session']
    organization = model.Group.get(context['organization_name'])

    data_dict['group_id'] = organization.id
    data_dict['is_recurring'] = (data_dict['recurring_interval'] != '0')
    data, errors = navl_validate(data_dict, schema, context)
    if errors:
        session.rollback()
        raise ValidationError(errors)

    obj = table_dict_save(data_dict, InventoryEntry, context)
    model.repo.commit()
    return table_dictize(obj, context)

def user_invite(context, data_dict):
    '''Invite a new user.

    You must be authorized to create group members.

    :param email: the email of the user to be invited to the group
    :type email: string
    :param group_id: the id or name of the group
    :type group_id: string
    :param role: role of the user in the group. One of ``member``,
        ``editor``, or ``admin``
    :type role: string

    :returns: the newly created user
    :rtype: dictionary
    '''
    toolkit.check_access('user_invite', context, data_dict)

    schema = context.get('schema',
                         logic.schema.default_user_invite_schema())
    data, errors = toolkit.navl_validate(data_dict, schema, context)
    if errors:
        raise toolkit.ValidationError(errors)

    model = context['model']
    group = model.Group.get(data['group_id'])
    if not group:
        raise toolkit.ObjectNotFound()

    name = logic.action.create._get_random_username_from_email(data['email'])
    password = str(random.SystemRandom().random())
    data['name'] = name
    data['password'] = password
    data['state'] = model.State.PENDING
    user_dict = toolkit.get_action('user_create')(context, data)
    user = model.User.get(user_dict['id'])
    member_dict = {
        'username': user.id,
        'id': data['group_id'],
        'role': data['role']
    }
    toolkit.get_action('group_member_create')(context, member_dict)

    if group.is_organization:
        group_dict = toolkit.get_action('organization_show')(
            context, {'id': data['group_id']})
    else:
        group_dict = toolkit.get_action('group_show')(
            context, {'id': data['group_id']})

    mailer.create_reset_key(user)

    # Email body
    group_type = (toolkit._('organization') if group_dict['is_organization']
                  else toolkit._('group'))
    role = data['role']
    extra_vars = {
        'reset_link': mailer.get_reset_link(user),
        'site_title': config.get('ckan.site_title'),
        'site_url': config.get('ckan.site_url'),
        'user_name': user.name,
        'role_name': authz.roles_trans().get(role, toolkit._(role)),
        'group_type': group_type,
        'group_title': group_dict.get('title'),
    }
    # NOTE: This template is translated
    body = render_jinja2('emails/invite_user.txt', extra_vars)
    subject = toolkit._('Invite for {site_title}').format(
        site_title=config.get('ckan.site_title'))
    mailer.mail_user(user, subject, body)

    return model_dictize.user_dictize(user, context)

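# Hedged example: inviting an editor into an organization; the acting user,
# ids, and email are illustrative.
invited = toolkit.get_action('user_invite')(
    {'user': 'org_admin'},
    {'email': 'new.member@example.org',
     'group_id': 'my-organization',
     'role': 'editor'})
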
def metadata_schema_create(context, data_dict):
    """
    Create a new metadata schema.

    You must be authorized to create metadata schemas.

    A metadata schema must be one and only one of the following:
    - the default for the given metadata standard (no organization or
      infrastructure)
    - associated with an organization
    - associated with an infrastructure

    Any metadata records that are now dependent on this schema are
    invalidated.

    :param name: the name of the new metadata schema (optional -
        auto-generated if not supplied); must conform to standard naming rules
    :type name: string
    :param description: the description of the metadata schema (optional)
    :type description: string
    :param metadata_standard_id: the id or name of the metadata standard for
        which this schema is defined
    :type metadata_standard_id: string
    :param schema_json: the JSON dictionary defining the schema
    :type schema_json: string
    :param organization_id: the id or name of the associated organization
        (nullable)
    :type organization_id: string
    :param infrastructure_id: the id or name of the associated infrastructure
        (nullable)
    :type infrastructure_id: string
    :param deserialize_json: convert JSON string fields to objects in the
        output dict (optional, default: ``False``)
    :type deserialize_json: boolean

    :returns: the newly created metadata schema (unless 'return_id_only' is
        set to True in the context, in which case just the metadata schema id
        will be returned)
    :rtype: dictionary
    """
    log.info("Creating metadata schema: %r", data_dict)
    tk.check_access('metadata_schema_create', context, data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)
    deserialize_json = asbool(data_dict.get('deserialize_json'))

    data, errors = tk.navl_validate(
        data_dict, schema.metadata_schema_create_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    metadata_schema = model_save.metadata_schema_dict_save(data, context)

    # creating the revision also flushes the session, which gives us the new
    # object id
    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(
            u'REST API: Create metadata schema %s') % metadata_schema.id

    dependent_record_list_context = context.copy()
    dependent_record_list_context['ignore_auth'] = True
    dependent_record_list = tk.get_action(
        'metadata_schema_dependent_record_list')(
            dependent_record_list_context, {'id': metadata_schema.id})

    invalidate_context = context.copy()
    invalidate_context.update({
        'defer_commit': True,
        'ignore_auth': True,
        'trigger_action': 'metadata_schema_create',
        'trigger_object_id': metadata_schema.id,
    })
    for metadata_record_id in dependent_record_list:
        tk.get_action('metadata_record_invalidate')(
            invalidate_context, {'id': metadata_record_id})

    if not defer_commit:
        model.repo.commit()

    output = metadata_schema.id if return_id_only \
        else tk.get_action('metadata_schema_show')(
            context, {'id': metadata_schema.id,
                      'deserialize_json': deserialize_json})
    return output

def metadata_record_workflow_annotation_create(context, data_dict):
    """
    Add a workflow annotation to a metadata record.

    You must be authorized to add annotations to the metadata record.

    This is a wrapper for jsonpatch_create, creating an 'add' patch operation
    with scope 'workflow'.

    :param id: the id or name of the metadata record to annotate
    :type id: string
    :param key: the key in the augmented metadata record dict at which the
        annotation value will be set; this cannot be an existing key in the
        metadata record show schema
    :type key: string
    :param value: the JSON object to set at the specified key
    :type value: string
    :param deserialize_json: convert JSON string fields to objects in the
        output dict (optional, default: ``False``)
    :type deserialize_json: boolean

    :returns: the newly created workflow annotation (which is a facade to the
        underlying JSONPatch object)
    :rtype: dictionary
    """
    log.info("Adding a workflow annotation to a metadata record: %r",
             data_dict)
    tk.check_access('metadata_record_workflow_annotation_create',
                    context, data_dict)

    session = context['session']

    data, errors = tk.navl_validate(
        data_dict, schema.metadata_record_workflow_annotation_create_schema(),
        context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    annotation = tk.get_action('metadata_record_workflow_annotation_show')(
        context, data_dict)
    if annotation:
        raise tk.ValidationError({'key': [
            _('Duplicate: workflow annotation with the given key already '
              'exists on metadata record')]})

    deserialize_json = asbool(data_dict.get('deserialize_json'))
    jsonpatch_context = context.copy()
    jsonpatch_context.update({
        'schema': schema.metadata_record_workflow_annotation_show_schema(
            deserialize_json),
        'ignore_auth': True,
    })
    jsonpatch_data = {
        'model_name': 'metadata_record',
        'object_id': data_dict['id'],
        'scope': 'workflow',
        'operation': {
            'op': 'add',
            'path': '/' + data_dict['key'],
            'value': json.loads(data_dict['value']),
        },
    }
    return tk.get_action('jsonpatch_create')(jsonpatch_context,
                                             jsonpatch_data)

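# Illustrative call: attach a reviewer note to a record. Note that 'value'
# must be a JSON *string* (it is json.loads'd above); the record id, key,
# and payload are assumptions.
annotation = tk.get_action('metadata_record_workflow_annotation_create')(
    context,
    {'id': 'my-metadata-record', 'key': 'review',
     'value': '{"status": "approved", "reviewer": "jane"}'})
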
def jsonpatch_update(context, data_dict):
    """
    Update a JSON Patch.

    It is recommended to call
    :py:func:`ckan.logic.action.get.jsonpatch_show`, make the desired changes
    to the result, and then call ``jsonpatch_update()`` with it.

    For further parameters see
    :py:func:`~ckanext.jsonpatch.logic.action.jsonpatch_create`.

    Note: model_name and object_id cannot be modified.

    The structure of the returned dictionary may be customized by passing
    'schema' in the context.

    :param id: the id of the JSON Patch to update
    :type id: string

    :returns: the updated JSON Patch (unless 'return_id_only' is set to True
        in the context, in which case just the JSON Patch id will be returned)
    :rtype: dictionary
    """
    log.info("Updating JSON Patch: %r", data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)

    jsonpatch_id = tk.get_or_bust(data_dict, 'id')
    jsonpatch = JSONPatch.get(jsonpatch_id)
    if jsonpatch is not None:
        jsonpatch_id = jsonpatch.id
    else:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('JSON Patch')))

    tk.check_access('jsonpatch_update', context, data_dict)

    data_dict.update({
        'id': jsonpatch_id,
    })
    context.update({
        'jsonpatch': jsonpatch,
        'allow_partial_update': True,
    })

    data, errors = tk.navl_validate(
        data_dict, schema.jsonpatch_update_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    jsonpatch = jsonpatch_dict_save(data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(u'REST API: Update JSON Patch %s') % jsonpatch_id

    if not defer_commit:
        model.repo.commit()

    output = jsonpatch_id if return_id_only \
        else tk.get_action('jsonpatch_show')(context, {'id': jsonpatch_id})
    return output

def workflow_state_create(context, data_dict):
    """
    Create a new workflow state.

    You must be authorized to create workflow states.

    :param name: the name of the new workflow state; must conform to standard
        naming rules
    :type name: string
    :param title: the title of the workflow state (optional)
    :type title: string
    :param description: the description of the workflow state (optional)
    :type description: string
    :param workflow_rules_json: JSON schema against which an augmented
        metadata record must be validated in order to be assigned this
        workflow state
    :type workflow_rules_json: string
    :param metadata_records_private: determines the private/public status of
        metadata records that are in this workflow state
    :type metadata_records_private: boolean
    :param revert_state_id: the id or name of the state to which a metadata
        record is reverted in case it no longer fulfils the rules for this
        state (nullable)
    :type revert_state_id: string
    :param deserialize_json: convert JSON string fields to objects in the
        output dict (optional, default: ``False``)
    :type deserialize_json: boolean

    :returns: the newly created workflow state (unless 'return_id_only' is
        set to True in the context, in which case just the workflow state id
        will be returned)
    :rtype: dictionary
    """
    log.info("Creating workflow state: %r", data_dict)
    tk.check_access('workflow_state_create', context, data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)
    deserialize_json = asbool(data_dict.get('deserialize_json'))

    data, errors = tk.navl_validate(
        data_dict, schema.workflow_state_create_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    workflow_state = model_save.workflow_state_dict_save(data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(
            u'REST API: Create workflow state %s') % workflow_state.id

    if not defer_commit:
        model.repo.commit()

    output = workflow_state.id if return_id_only \
        else tk.get_action('workflow_state_show')(
            context, {'id': workflow_state.id,
                      'deserialize_json': deserialize_json})
    return output

def role_update(context, data_dict):
    """
    Update a role.

    You must be a sysadmin to update roles.

    It is recommended to call :py:func:`ckan.logic.action.get.role_show`,
    make the desired changes to the result, and then call ``role_update()``
    with it.

    For further parameters see
    :py:func:`~ckanext.accesscontrol.logic.action.role_create`.

    :param id: the id or name of the role to update
    :type id: string

    :returns: the updated role (unless 'return_id_only' is set to True in the
        context, in which case just the role id will be returned)
    :rtype: dictionary
    """
    log.info("Updating role: %r", data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)
    return_id_only = context.get('return_id_only', False)

    role_id = tk.get_or_bust(data_dict, 'id')
    role = extmodel.Role.get(role_id)
    if role is not None:
        role_id = role.id
    else:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('Role')))

    tk.check_access('role_update', context, data_dict)

    data_dict.update({
        'id': role_id,
    })
    context.update({
        'role': role,
        'allow_partial_update': True,
    })

    data, errors = tk.navl_validate(
        data_dict, schema.role_update_schema(), context)
    if errors:
        session.rollback()
        raise tk.ValidationError(errors)

    role = dictization.role_dict_save(data, context)

    rev = model.repo.new_revision()
    rev.author = user
    if 'message' in context:
        rev.message = context['message']
    else:
        rev.message = _(u'REST API: Update role %s') % role_id

    if not defer_commit:
        model.repo.commit()

    output = role_id if return_id_only \
        else tk.get_action('role_show')(context, {'id': role_id})
    return output

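# Hedged sketch of the show-modify-update flow recommended in the docstring
# above; the role name and description are illustrative.
role_dict = tk.get_action('role_show')(context, {'id': 'curator'})
role_dict['description'] = 'Can review and publish metadata records'
role_dict = tk.get_action('role_update')(context, role_dict)
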