Example #1
File: patch.py  Project: frafra/ckan
def group_patch(context: Context,
                data_dict: DataDict) -> ActionResult.GroupPatch:
    '''Patch a group

    :param id: the id or name of the group
    :type id: string

    The difference between the update and patch methods is that the patch
    will perform an update of the provided parameters, while leaving all
    other parameters unchanged, whereas the update method deletes all
    parameters not explicitly provided in the data_dict.
    '''
    _check_access('group_patch', context, data_dict)

    show_context: Context = {
        'model': context['model'],
        'session': context['session'],
        'user': context['user'],
        'auth_user_obj': context['auth_user_obj'],
    }

    group_dict = _get_action('group_show')(show_context, {
        'id': _get_or_bust(data_dict, 'id')
    })

    patched = dict(group_dict)
    patched.pop('display_name', None)
    patched.update(data_dict)

    patch_context = context.copy()
    patch_context['allow_partial_update'] = True
    return _update.group_update(patch_context, patched)
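
A hedged usage sketch (not part of the source): invoking group_patch from plugin or command code through CKAN's action layer, so that only the supplied field changes. The group id, title value, and acting user below are placeholders.

import ckan.plugins.toolkit as toolkit

def rename_group(group_id: str, new_title: str, username: str) -> dict:
    # Only 'title' is sent; group_patch merges it over the group_show output,
    # so every other group field keeps its current value.
    return toolkit.get_action('group_patch')(
        {'user': username},
        {'id': group_id, 'title': new_title},
    )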
Example #2
File: auth.py  Project: tino097/ckan
def datastore_search_sql(context: Context, data_dict: DataDict) -> AuthResult:
    '''need access to view all tables in query'''

    for name in context['table_names']:
        name_auth = datastore_auth(
            context.copy(),  # required because check_access mutates context
            {'id': name},
            'resource_show')
        if not name_auth['success']:
            return {
                'success': False,
                'msg': 'Not authorized to read resource.'}
    return {'success': True}
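
A minimal sketch, assuming the same context-mutation concern noted in the comment above: because check_access mutates the context it receives, each authorization check gets its own shallow copy so state does not leak between resources. The helper name is hypothetical.

import ckan.plugins.toolkit as toolkit

def all_resources_readable(context, resource_ids):
    for res_id in resource_ids:
        try:
            # check_access raises NotAuthorized on failure and mutates the
            # context it is given, hence a fresh copy per resource.
            toolkit.check_access('resource_show', context.copy(), {'id': res_id})
        except toolkit.NotAuthorized:
            return False
    return True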
Example #3
def group_list_dictize(obj_list: Union[Iterable[model.Group],
                                       Iterable[tuple[model.Group, str]]],
                       context: Context,
                       sort_key: Callable[...,
                                          Any] = lambda x: x['display_name'],
                       reverse: bool = False,
                       with_package_counts: bool = True,
                       include_groups: bool = False,
                       include_tags: bool = False,
                       include_extras: bool = False) -> list[dict[str, Any]]:

    group_dictize_context: Context = context.copy()
    # Set options to avoid any SOLR queries for each group, which would
    # slow things further.
    group_dictize_options: dict[str, Any] = {
        'packages_field': 'dataset_count' if with_package_counts else None,
        # don't allow packages_field='datasets' as it is too slow
        'include_groups': include_groups,
        'include_tags': include_tags,
        'include_extras': include_extras,
        'include_users': False,  # too slow - don't allow
    }
    if with_package_counts and 'dataset_counts' not in group_dictize_context:
        # 'dataset_counts' will already be in the context in the case that
        # group_list_dictize recurses via group_dictize (groups in groups)
        group_dictize_context['dataset_counts'] = get_group_dataset_counts()
    if context.get('with_capacity'):
        group_list = [
            group_dictize(group,
                          group_dictize_context,
                          capacity=capacity,
                          **group_dictize_options) for group, capacity in cast(
                              "list[tuple[model.Group, str]]", obj_list)
        ]
    else:
        group_list = [
            group_dictize(group, group_dictize_context,
                          **group_dictize_options)
            for group in cast("list[model.Group]", obj_list)
        ]

    return sorted(group_list, key=sort_key, reverse=reverse)
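
A hedged example of how a caller might use the sort_key and reverse parameters, here ordering groups by their dataset count; the model query and context are assumed to already exist in the calling code.

active_groups = model.Session.query(model.Group).filter_by(state='active').all()
group_dicts = group_list_dictize(
    active_groups,
    context,
    # 'dataset_count' is only present when with_package_counts is True
    sort_key=lambda g: g['dataset_count'],
    reverse=True,
    with_package_counts=True,
)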
Example #4
File: update.py  Project: tino097/ckan
def package_revise(context: Context, data_dict: DataDict) -> ActionResult.PackageRevise:
    '''Revise a dataset (package) selectively with match, filter and
    update parameters.

    You must be authorized to edit the dataset and the groups that it belongs
    to.

    :param match: a dict containing "id" or "name" values of the dataset
                  to update, all values provided must match the current
                  dataset values or a ValidationError will be raised. e.g.
                  ``{"name": "my-data", "resources": [{"name": "big.csv"}]}``
                  would abort if the my-data dataset's first resource name
                  is not "big.csv".
    :type match: dict
    :param filter: a list of string patterns of fields to remove from the
                   current dataset. e.g. ``"-resources__1"`` would remove the
                   second resource, ``"+title, +resources, -*"`` would
                   remove all fields at the dataset level except title and
                   all resources (default: ``[]``)
    :type filter: comma-separated string patterns or list of string patterns
    :param update: a dict with values to update/create after filtering
                   e.g. ``{"resources": [{"description": "file here"}]}`` would
                   update the description for the first resource
    :type update: dict
    :param include: a list of string patterns of fields to include in the
                    response, e.g. ``"-*"`` to return nothing (default: ``[]``,
                    all fields returned)
    :type include: comma-separated string patterns or list of string patterns

    ``match`` and ``update`` parameters may also be passed as "flattened keys", using
    either the item numeric index or its unique id (with a minimum of 5 characters), e.g.
    ``update__resources__1f9ab__description="guidebook"`` would set the
    description of the resource with id starting with "1f9ab" to "guidebook", and
    ``update__resources__-1__description="guidebook"`` would do the same
    on the last resource in the dataset.

    The ``extend`` suffix can also be used on the update parameter to add
    a new item to a list, e.g. ``update__resources__extend=[{"name": "new resource", "url": "https://example.com"}]``
    will add a new resource to the dataset with the provided ``name`` and ``url``.

    Usage examples:

    * Change description in dataset, checking for old description::

        match={"notes": "old notes", "name": "xyz"}
        update={"notes": "new notes"}

    * Identical to above, but using flattened keys::

        match__name="xyz"
        match__notes="old notes"
        update__notes="new notes"

    * Replace all fields at dataset level only, keep resources (note: dataset id
      and type fields can't be deleted) ::

        match={"id": "1234abc-1420-cbad-1922"}
        filter=["+resources", "-*"]
        update={"name": "fresh-start", "title": "Fresh Start"}

    * Add a new resource (__extend on flattened key)::

        match={"id": "abc0123-1420-cbad-1922"}
        update__resources__extend=[{"name": "new resource", "url": "http://example.com"}]

    * Update a resource by its index::

        match={"name": "my-data"}
        update__resources__0={"name": "new name, first resource"}

    * Update a resource by its id (provide at least 5 characters)::

        match={"name": "their-data"}
        update__resources__19cfad={"description": "right one for sure"}

    * Replace all fields of a resource::

        match={"id": "34a12bc-1420-cbad-1922"}
        filter=["+resources__1492a__id", "-resources__1492a__*"]
        update__resources__1492a={"name": "edits here", "url": "http://example.com"}


    :returns: a dict containing ``'package'``: the updated dataset with
              fields filtered by the ``include`` parameter
    :rtype: dictionary

    '''
    model = context['model']

    schema = schema_.package_revise_schema()
    data, errors = _validate(data_dict, schema, context)
    if errors:
        model.Session.rollback()
        raise ValidationError(errors)

    name_or_id = (
        data['match__'].get('id') or
        data.get('match', {}).get('id') or
        data['match__'].get('name') or
        data.get('match', {}).get('name'))
    if name_or_id is None:
        raise ValidationError({'match__id': _('Missing value')})

    package_show_context = context.copy()
    package_show_context['for_update'] = True
    orig = _get_action('package_show')(
        package_show_context,
        {'id': name_or_id})

    pkg = package_show_context['package']  # side-effect of package_show

    unmatched = []
    if 'match' in data:
        unmatched.extend(dfunc.check_dict(orig, data['match']))

    for k, v in sorted(data['match__'].items()):
        unmatched.extend(dfunc.check_string_key(orig, k, v))

    if unmatched:
        model.Session.rollback()
        raise ValidationError({'message': [{'match': [
            '__'.join(str(p) for p in unm)
            for unm in unmatched
        ]}]})

    if 'filter' in data:
        orig_id = orig['id']
        dfunc.filter_glob_match(orig, data['filter'])
        orig['id'] = orig_id

    if 'update' in data:
        try:
            dfunc.update_merge_dict(orig, data['update'])
        except dfunc.DataError as de:
            model.Session.rollback()
            raise ValidationError({'message': [{'update': [de.error]}]})

    # update __extend keys before __#__* so that files may be
    # attached to newly added resources in the same call
    for k, v in sorted(
            data['update__'].items(),
            key=lambda s: s[0][:-6] if s[0].endswith('extend') else s[0]):
        try:
            dfunc.update_merge_string_key(orig, k, v)
        except dfunc.DataError as de:
            model.Session.rollback()
            raise ValidationError({'message': [{k: [de.error]}]})

    _check_access('package_revise', context, {"update": orig})

    # future-proof return dict by putting package data under
    # "package". We will want to return activity info
    # on update or "nothing changed" status once possible
    rval = {
        'package': _get_action('package_update')(
            cast(Context, dict(context, package=pkg)),
            orig)}
    if 'include' in data_dict:
        dfunc.filter_glob_match(rval, data_dict['include'])
    return rval
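
A hedged sketch of calling package_revise over the HTTP action API with flattened keys, mirroring the "change description, checking for old description" example from the docstring; the site URL, dataset name, and API token are placeholders.

import requests

response = requests.post(
    'https://ckan.example.org/api/3/action/package_revise',
    json={
        'match__name': 'my-data',      # abort unless the name still matches
        'match__notes': 'old notes',   # abort unless the notes still match
        'update__notes': 'new notes',  # only 'notes' is changed
        'include': '-*',               # return no fields to keep the response small
    },
    headers={'Authorization': 'API-TOKEN'},
)
response.raise_for_status()
assert response.json()['success']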
Example #5
File: update.py  Project: tino097/ckan
def package_update(
        context: Context, data_dict: DataDict) -> ActionResult.PackageUpdate:
    '''Update a dataset (package).

    You must be authorized to edit the dataset and the groups that it belongs
    to.

    .. note:: Update methods may delete parameters not explicitly provided in
        the data_dict. If you want to edit only a specific attribute, use
        ``package_patch`` instead.

    It is recommended to call
    :py:func:`ckan.logic.action.get.package_show`, make the desired changes to
    the result, and then call ``package_update()`` with it.

    Plugins may change the parameters of this function depending on the value
    of the dataset's ``type`` attribute, see the
    :py:class:`~ckan.plugins.interfaces.IDatasetForm` plugin interface.

    For further parameters see
    :py:func:`~ckan.logic.action.create.package_create`.

    :param id: the name or id of the dataset to update
    :type id: string

    :returns: the updated dataset (unless ``'return_id_only'`` is set to
              ``True`` in the context, in which case only the dataset id is
              returned)
    :rtype: dictionary

    '''
    model = context['model']
    name_or_id = data_dict.get('id') or data_dict.get('name')
    if name_or_id is None:
        raise ValidationError({'id': _('Missing value')})

    pkg = model.Package.get(name_or_id)
    if pkg is None:
        raise NotFound(_('Package was not found.'))
    context["package"] = pkg

    # immutable fields
    data_dict["id"] = pkg.id
    data_dict['type'] = pkg.type

    _check_access('package_update', context, data_dict)

    user = context['user']
    # get the schema

    package_plugin = lib_plugins.lookup_package_plugin(pkg.type)
    schema = context.get('schema') or package_plugin.update_package_schema()
    if 'api_version' not in context:
        # check_data_dict() is deprecated. If the package_plugin has a
        # check_data_dict() we'll call it, if it doesn't have the method we'll
        # do nothing.
        check_data_dict = getattr(package_plugin, 'check_data_dict', None)
        if check_data_dict:
            try:
                package_plugin.check_data_dict(data_dict, schema)
            except TypeError:
                # Old plugins do not support passing the schema so we need
                # to ensure they still work.
                package_plugin.check_data_dict(data_dict)

    resource_uploads = []
    for resource in data_dict.get('resources', []):
        # file uploads/clearing
        upload = uploader.get_resource_uploader(resource)

        if 'mimetype' not in resource:
            if hasattr(upload, 'mimetype'):
                resource['mimetype'] = upload.mimetype

        if 'size' not in resource and 'url_type' in resource:
            if hasattr(upload, 'filesize'):
                resource['size'] = upload.filesize

        resource_uploads.append(upload)

    data, errors = lib_plugins.plugin_validate(
        package_plugin, context, data_dict, schema, 'package_update')
    log.debug('package_update validate_errs=%r user=%s package=%s data=%r',
              errors, user, context['package'].name, data)

    if errors:
        model.Session.rollback()
        raise ValidationError(errors)

    # avoid revisioning by updating directly
    model.Session.query(model.Package).filter_by(id=pkg.id).update(
        {"metadata_modified": datetime.datetime.utcnow()})
    model.Session.refresh(pkg)

    pkg = model_save.package_dict_save(data, context)

    context_org_update = context.copy()
    context_org_update['ignore_auth'] = True
    context_org_update['defer_commit'] = True
    _get_action('package_owner_org_update')(context_org_update,
                                            {'id': pkg.id,
                                             'organization_id': pkg.owner_org})

    # Needed to let extensions know the new resources ids
    model.Session.flush()
    for index, (resource, upload) in enumerate(
            zip(data.get('resources', []), resource_uploads)):
        resource['id'] = pkg.resources[index].id

        upload.upload(resource['id'], uploader.get_max_resource_size())

    for item in plugins.PluginImplementations(plugins.IPackageController):
        item.edit(pkg)

        item.after_dataset_update(context, data)

    if not context.get('defer_commit'):
        model.repo.commit()

    log.debug('Updated object %s', pkg.name)

    return_id_only = context.get('return_id_only', False)

    # Make sure that a user provided schema is not used on package_show
    context.pop('schema', None)

    # we could update the dataset, so we should still be able to read it.
    context['ignore_auth'] = True
    output = data_dict['id'] if return_id_only \
            else _get_action('package_show')(context, {'id': data_dict['id']})

    return output
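
A minimal sketch of the read-modify-write flow recommended in the docstring: fetch the full dataset with package_show, change only what is needed, and pass the whole dict back to package_update so no fields are dropped. The dataset name and acting user are placeholders.

import ckan.plugins.toolkit as toolkit

context = {'user': 'editor-name'}  # hypothetical acting user
pkg_dict = toolkit.get_action('package_show')(dict(context), {'id': 'my-data'})
pkg_dict['notes'] = 'Refreshed description'
# each action call gets its own context copy, since actions mutate the context
updated = toolkit.get_action('package_update')(dict(context), pkg_dict)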