def basket_clear(context, data_dict):
    """Clear a basket's datasets in the user home directory
    (needs ckanext-localimp).

    :param basket_id: id of the basket to clear
    :type basket_id: string
    :param packages: packages to remove from the basket (optional; defaults
        to every package currently in the basket)
    :type packages: list of strings
    :returns: None
    """
    bsk_dct = tk.get_action("basket_show")(
        context,
        {
            "id": tk.get_or_bust(data_dict, 'basket_id'),
            "include_elements": True,
        })
    if "packages" not in data_dict:
        data_dict['packages'] = tk.get_or_bust(bsk_dct, "packages")
    # Default to [] so a basket dict without a "packages" key does not
    # raise TypeError on the membership test below (`x in None` crashes).
    basket_packages = bsk_dct.get('packages') or []
    pkg_in_basket = [
        ele for ele in data_dict['packages'] if ele in basket_packages
    ]
    for pkg_id in pkg_in_basket:
        tk.get_action("localimp_remove_symlink")(
            context,
            {
                "id": pkg_id,
                "directory_name": bsk_dct.get("name", None),
            })
def dataontosearch_tag_delete(context, data_dict):
    '''
    Remove an existing association between the specified dataset and concept.

    :param dataset: Name or ID of the dataset to disassociate with a concept
    :type dataset: string
    :param concept: RDF URI or human-readable label for the concept to no
        longer associate with the dataset
    :type concept: string
    :return: True
    :rtype: bool
    '''
    toolkit.check_access(u'dataontosearch_tag_delete', context, data_dict)

    # Extract parameters from data_dict
    dataset_id_or_name = toolkit.get_or_bust(data_dict, u'dataset')
    concept_url_or_label = toolkit.get_or_bust(data_dict, u'concept')

    # What dataset is specified?
    # NOTE(review): context is deliberately None here, so package_show runs
    # with a fresh default context — confirm this is intended.
    dataset = toolkit.get_action(u'package_show')(None, {
        u'id': dataset_id_or_name,
    })
    # Same RDF URI generation used when the tag was created, so the remote
    # service recognises the dataset.
    dataset_rdf_uri = dataset_uri(dataset)

    # Make the request
    r = make_tagger_delete_request(u'/tag', {
        u'dataset_id': dataset_rdf_uri,
        u'concept': concept_url_or_label,
    })
    r.raise_for_status()
    data = r.json()
    return data[u'success']
def dataontosearch_tag_create(context, data_dict):
    '''
    Create a new association between the specified dataset and concept.

    :param dataset: Name or ID of the dataset to associate with a concept
    :type dataset: string
    :param concept: RDF URI or human-readable label for the concept to
        associate with the dataset
    :type concept: string
    :return: The dataset, concept and id for the newly created tag
    :rtype: dictionary
    '''
    toolkit.check_access(u'dataontosearch_tag_create', context, data_dict)

    # Extract parameters from data_dict
    dataset_id_or_name = toolkit.get_or_bust(data_dict, u'dataset')
    concept_url_or_label = toolkit.get_or_bust(data_dict, u'concept')

    # We must provide DataOntoSearch with a URL of where to download metadata,
    # so generate this URL. First, what dataset was specified?
    dataset = toolkit.get_action(u'package_show')(None, {
        u'id': dataset_id_or_name
    })

    # We assume the RDF is available at the usual dataset URL, but with a
    # .rdf suffix
    dataset_id = dataset.get(u'id')
    dataset_url = toolkit.url_for(u'dataset_read', id=dataset_id,
                                  qualified=True)
    rdf_url = u'{}.rdf'.format(dataset_url)

    # Now we are equipped to actually create the tag
    r = make_tagger_post_request(u'/tag', {
        u'dataset_url': rdf_url,
        u'concept': concept_url_or_label,
    })
    r.raise_for_status()

    # Handle response
    data = r.json()
    if not data[u'success']:
        raise RuntimeError(data[u'message'])

    return {
        u'dataset': dataset_id,
        u'concept': concept_url_or_label,
        u'id': data[u'id'],
    }
def ogdch_autosuggest(context, data_dict):
    """Return search suggestions for a (partial) query in a given language.

    :param q: the (partial) search term to get suggestions for
    :type q: string
    :param lang: language code, e.g. 'de' or 'de_CH'; must resolve to one
        of en/it/de/fr
    :type lang: string
    :param fq: optional additional Solr filter query
    :type fq: string
    :returns: deduplicated list of suggestion strings, matches wrapped in <b>
    :raises ValidationError: if lang is not a supported language
    :raises ActionError: if Solr cannot be queried
    """
    q = get_or_bust(data_dict, 'q')
    lang = get_or_bust(data_dict, 'lang')
    fq = data_dict.get('fq', '')

    # Private datasets must never show up in suggestions.
    if fq:
        fq = 'NOT private AND %s' % fq
    else:
        fq = 'NOT private'

    # parse language from values like de_CH
    if len(lang) > 2:
        lang = lang[:2]

    if lang not in ['en', 'it', 'de', 'fr']:
        raise ValidationError('lang must be one of [en, it, de, fr]')

    handler = '/suggest_%s' % lang
    suggester = 'ckanSuggester_%s' % lang

    solr = make_connection()
    try:
        log.debug(
            'Loading suggestions for %s (lang: %s, fq: %s)' % (q, lang, fq)
        )
        results = solr.search(
            '',
            search_handler=handler,
            **{'suggest.q': q, 'suggest.count': 10, 'suggest.cfq': fq}
        )
        # list(...) is required on Python 3, where dict.values() returns a
        # non-subscriptable view object (the bare .values()[0] crashed).
        suggestions = list(
            results.raw_response['suggest'][suggester].values())[0]  # noqa

        def highlight(term, q):
            """Wrap the part of `term` matching `q` in <b> tags (once)."""
            if '<b>' in term:
                return term
            # Compare accent-folded forms so e.g. 'Zurich' matches 'Zürich'.
            clean_q = unidecode(q)
            clean_term = unidecode(term)

            re_q = re.escape(clean_q)
            m = re.search(re_q, clean_term, re.I)
            if m:
                replace_text = term[m.start():m.end()]
                term = term.replace(replace_text,
                                    '<b>%s</b>' % replace_text)
            return term

        terms = [highlight(suggestion['term'], q)
                 for suggestion in suggestions['suggestions']]  # noqa
        return list(set(terms))
    except pysolr.SolrError as e:
        log.exception('Could not load suggestions from solr: %s' % e)
        raise ActionError('Error retrieving suggestions from solr')
def role_delete(context, data_dict):
    """
    Delete a role. You must be a sysadmin to delete roles.

    :param id: the id of the role to delete
    :type id: string
    """
    log.info("Deleting role: %r", data_dict)

    model = context['model']
    user = context['user']
    session = context['session']  # required to be present in context
    defer_commit = context.get('defer_commit', False)

    role_id = tk.get_or_bust(data_dict, 'id')
    role = extmodel.Role.get(role_id)
    if role is None:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('Role')))
    role_id = role.id

    tk.check_access('role_delete', context, data_dict)

    # Record the deletion as a new revision before removing the role.
    rev = model.repo.new_revision()
    rev.author = user
    rev.message = _(u'REST API: Delete Role %s') % role_id

    role.delete()
    if not defer_commit:
        model.repo.commit()
def cadasta_upload_resource(context, data_dict):
    """Check upload permission for a cadasta resource of the given type.

    :param resource_type: one of 'parcel', 'party', 'relationship', 'project'
    :type resource_type: string
    :raises ValidationError: if resource_type is not one of the above
    """
    resource_type = toolkit.get_or_bust(data_dict, 'resource_type')
    valid_types = ['parcel', 'party', 'relationship', 'project']
    if resource_type not in valid_types:
        raise toolkit.ValidationError(['Not a valid resource_type'])

    # Map the resource type to its matching upload permission name.
    permission = 'upload_{0}_resource'.format(resource_type)
    return has_permission_for_project(
        context, data_dict, permission, 'project_id')
def search_add(context, data_dict):
    '''
    Add an item to the search_history for the current user.

    :param content: Search query to add to history
    :type content: string
    '''
    try:
        tk.check_access('ckanext_search_history_add', context, data_dict)
    except tk.NotAuthorized:
        # #JOE# authorization failures are deliberately ignored
        # (the original abort(401) call was disabled).
        pass

    # Lazily create the backing table on first use.
    if db.search_history_table is None:
        db.init_db(context['model'])

    query_text = tk.get_or_bust(data_dict, 'content')
    username = context.get('user')
    # #JOE# allow_none=True: anonymous searches get recorded without a user.
    user_id = new_authz.get_user_id_for_username(username, allow_none=True)

    history_row = db.SearchHistory()
    history_row.content = query_text
    history_row.user_id = user_id

    session = context['session']
    session.add(history_row)
    session.commit()
    return db.table_dictize(history_row, context)
def resource_schema_pkey_create(context, data_dict):
    '''Add a primary key to a resource's schema.

    :param resource_id: the ID of the resource
    :type resource_id: string
    :param pkey: the primary key, either the name of one of the fields or a
        list of field names from the resource's schema
    :type pkey: string or iterable of strings

    :returns: the primary key that was created
    :rtype: string or list of strings
    '''
    res_id = toolkit.get_or_bust(data_dict, 'resource_id')

    # A resource may carry only one primary key; creating a second is an
    # error, so look up any existing key first.
    try:
        existing_pkey = toolkit.get_action('resource_schema_pkey_show')(
            context, {'resource_id': res_id})
    except exceptions.InvalidResourceIDException:
        raise toolkit.ValidationError(toolkit._("Invalid resource_id"))

    if existing_pkey is not None:
        raise toolkit.ValidationError(
            toolkit._("The resource already has a primary key"))

    # With no existing key, create behaves exactly like update.
    return toolkit.get_action('resource_schema_pkey_update')(
        context, data_dict)
def role_show(context, data_dict):
    """
    Return a role definition. You must be a sysadmin to view roles.

    :param id: the id of the role
    :type id: string
    :rtype: dictionary
    """
    log.debug("Retrieving role: %r", data_dict)

    role = extmodel.Role.get(tk.get_or_bust(data_dict, 'id'))
    if role is None:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('Role')))

    tk.check_access('role_show', context, data_dict)

    # Dictize the role and run it through the output schema.
    context['role'] = role
    role_dict = dictization.role_dictize(role, context)
    result_dict, errors = tk.navl_validate(
        role_dict, schema.role_show_schema(), context)
    return result_dict
def role_permission_list(context, data_dict):
    """
    Return a list of permissions for a role. You must be a sysadmin to list
    role permissions.

    :param role_id: the id or name of the role
    :type role_id: string
    :returns: list of dicts
    """
    log.debug("Retrieving role permission list: %r", data_dict)
    tk.check_access('role_permission_list', context, data_dict)
    session = context['session']

    role = extmodel.Role.get(tk.get_or_bust(data_dict, 'role_id'))
    if role is None:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('Role')))

    # Permissions link to roles via matching (content_type, operation)
    # pairs rather than a foreign key, hence the composite join condition.
    join_condition = and_(
        extmodel.Permission.content_type ==
        extmodel.RolePermission.content_type,
        extmodel.Permission.operation ==
        extmodel.RolePermission.operation)

    permissions = session.query(extmodel.Permission) \
        .join(extmodel.RolePermission, join_condition) \
        .filter(extmodel.RolePermission.role_id == role.id) \
        .filter(extmodel.RolePermission.state == 'active') \
        .all()

    return dictization.permission_list_dictize(permissions, context)
def inventory_entry_list(context, data_dict):
    '''Return a list of inventory entries.

    :param name: organization name
    :type name: string
    :rtype: list of dictionaries
    '''
    # TODO @palcu: define this
    # check_access('inventory_manage', context, data_dict)
    model = context['model']
    org_name = get_or_bust(data_dict, 'name')
    organization = model.Group.get(org_name)
    if not organization:
        raise ObjectNotFound('Organization was not found')

    entries = []
    for inventory_entry in organization.inventory_entries:
        entry = table_dictize(inventory_entry, context)
        entry['next_deadline_timestamp'] = None
        if entry['last_added_dataset_timestamp']:
            # Next deadline = last dataset addition + recurring interval.
            last_added = _datestamp_to_datetime(
                entry['last_added_dataset_timestamp'])
            delta = timedelta(days=entry['recurring_interval'])
            entry['next_deadline_timestamp'] = last_added + delta
        entries.append(entry)
    return entries
def project_admin_add(context, data_dict):
    """Add a user to the list of project admins.

    :param username: name of the user to add to project user admin list
    :type username: string
    """
    toolkit.check_access("ckanext_project_admin_add", context, data_dict)

    # validate the incoming data_dict
    validated_data_dict, errors = validate(
        data_dict, project_admin_add_schema(), context)
    # Report validation errors *before* using the validated data, so a
    # schema failure surfaces as a ValidationError with the real errors
    # instead of a confusing downstream get_or_bust/conversion failure.
    if errors:
        raise toolkit.ValidationError(errors)

    username = toolkit.get_or_bust(validated_data_dict, "username")
    try:
        user_id = convert_user_name_or_id_to_id(username, context)
    except toolkit.Invalid:
        raise toolkit.ObjectNotFound

    if projectAdmin.exists(user_id=user_id):
        raise toolkit.ValidationError(
            "projectAdmin with user_id '{0}' already exists.".format(user_id),
            error_summary=u"User '{0}' is already a project Admin.".format(
                username),
        )

    # create project admin entry
    return projectAdmin.create(user_id=user_id)
def project_package_association_create(context, data_dict):
    """Create an association between a project and a package.

    :param project_id: id or name of the project to associate
    :type project_id: string
    :param package_id: id or name of the package to associate
    :type package_id: string
    """
    toolkit.check_access(
        "ckanext_project_package_association_create", context, data_dict)

    # Validate first; refuse to continue on any schema error.
    validated, errors = validate(
        data_dict, project_package_association_create_schema(), context)
    if errors:
        raise toolkit.ValidationError(errors)

    package_id, project_id = toolkit.get_or_bust(
        validated, ["package_id", "project_id"])

    if projectPackageAssociation.exists(package_id=package_id,
                                        project_id=project_id):
        package_title = convert_package_name_or_id_to_title_or_name(
            package_id, context)
        raise toolkit.ValidationError(
            "projectPackageAssociation with package_id '{0}' and "
            "project_id '{1}' already exists.".format(
                package_id, project_id),
            error_summary=u"The dataset, {0}, is already in the project".format(
                package_title),
        )

    # create the association
    return projectPackageAssociation.create(
        package_id=package_id, project_id=project_id)
def showcase_package_association_create(context, data_dict):
    '''Create an association between a showcase and a package.

    :param showcase_id: id or name of the showcase to associate
    :type showcase_id: string
    :param package_id: id or name of the package to associate
    :type package_id: string
    '''
    toolkit.check_access(
        'ckanext_showcase_package_association_create', context, data_dict)

    # Bail out early on any validation error.
    validated, errors = validate(
        data_dict, showcase_package_association_create_schema(), context)
    if errors:
        raise toolkit.ValidationError(errors)

    package_id, showcase_id = toolkit.get_or_bust(
        validated, ['package_id', 'showcase_id'])

    if ShowcasePackageAssociation.exists(package_id=package_id,
                                         showcase_id=showcase_id):
        raise toolkit.ValidationError(
            "ShowcasePackageAssociation with package_id '{0}' and "
            "showcase_id '{1}' already exists.".format(
                package_id, showcase_id),
            error_summary=u"The dataset, {0}, is already in the showcase".format(
                convert_package_name_or_id_to_title_or_name(
                    package_id, context)))

    # create the association
    return ShowcasePackageAssociation.create(
        package_id=package_id, showcase_id=showcase_id)
def ogdch_dataset_terms_of_use(context, data_dict):
    '''
    Returns the terms of use for the requested dataset.

    By definition the terms of use of a dataset corresponds to the least
    open rights statement of all distributions of the dataset.

    :param id: id or name of the dataset
    :type id: string
    :returns: dict with a single key 'dataset_rights'
    :rtype: dict
    '''
    # Ordered from most open to least open; the index comparison below
    # relies on this ordering.
    terms = [
        'NonCommercialAllowed-CommercialAllowed-ReferenceNotRequired',
        'NonCommercialAllowed-CommercialAllowed-ReferenceRequired',
        'NonCommercialAllowed-CommercialWithPermission-ReferenceNotRequired',
        'NonCommercialAllowed-CommercialWithPermission-ReferenceRequired',
        'ClosedData',
    ]
    # Read the package as the site user so private fields are accessible.
    user = tk.get_action('get_site_user')({'ignore_auth': True}, {})
    req_context = {'user': user['name']}
    pkg_id = get_or_bust(data_dict, 'id')
    pkg = tk.get_action('package_show')(req_context, {'id': pkg_id})

    least_open = None
    for res in pkg['resources']:
        # Use .get() so a resource without a 'rights' key is treated as
        # ClosedData instead of raising KeyError.
        if res.get('rights') not in terms:
            least_open = 'ClosedData'
            break
        if least_open is None:
            least_open = res['rights']
            continue
        if terms.index(res['rights']) > terms.index(least_open):
            least_open = res['rights']
    # No resources at all => most restrictive statement.
    if least_open is None:
        least_open = 'ClosedData'

    return {'dataset_rights': least_open}
def showcase_admin_add(context, data_dict):
    '''Add a user to the list of showcase admins.

    :param username: name of the user to add to showcase user admin list
    :type username: string
    '''
    toolkit.check_access('ckanext_showcase_admin_add', context, data_dict)

    # validate the incoming data_dict
    validated_data_dict, errors = validate(
        data_dict, showcase_admin_add_schema(), context)
    # Report validation errors *before* using the validated data, so a
    # schema failure surfaces as a ValidationError with the real errors
    # instead of a confusing downstream get_or_bust/conversion failure.
    if errors:
        raise toolkit.ValidationError(errors)

    username = toolkit.get_or_bust(validated_data_dict, 'username')
    try:
        user_id = convert_user_name_or_id_to_id(username, context)
    except toolkit.Invalid:
        raise toolkit.ObjectNotFound

    if ShowcaseAdmin.exists(user_id=user_id):
        raise toolkit.ValidationError(
            "ShowcaseAdmin with user_id '{0}' already exists.".format(user_id),
            error_summary=u"User '{0}' is already a Showcase Admin.".format(
                username))

    # create showcase admin entry
    return ShowcaseAdmin.create(user_id=user_id)
def setup_template_variables(self, context, data_dict):
    """Setup variables available to templates"""
    hdl = HandleService()

    # Author name, with a display placeholder when citation info is empty.
    author_name = data_dict['package'].get('citation_info', '') or 'Author name'

    # Publication year, with a display placeholder when no date is set.
    iso_pub_date = data_dict['package'].get('iso_pubDate', '')
    if iso_pub_date:
        publication_year = h.date_str_to_datetime(iso_pub_date).year
    else:
        publication_year = "Publication year"

    resource = data_dict['resource']
    res_name = resource.get('name', '')
    res_id = tk.get_or_bust(resource, 'id')
    ver_number = tk.get_action('resource_version_number')(context, {
        'id': res_id
    })
    res_pid = resource.get(hdl.resource_field, '')
    access_date = datetime.datetime.now()

    return {
        'author_name': author_name,
        'publication_year': publication_year,
        'res_name': res_name,
        'ver_number': ver_number,
        'res_pid': res_pid,
        'access_date': access_date
    }
def dataontosearch_tag_list(context, data_dict):
    '''
    List concepts associated with the specified dataset.

    :param id: id or name of the dataset to fetch tags for
    :type id: string
    :rtype: list of concepts. Each concept is a dict, with 'label' being
        human-readable label and 'uri' being the URI identifying this concept
    '''
    toolkit.check_access(u'dataontosearch_tag_list', context, data_dict)

    # Resolve the dataset named in the request.
    dataset = toolkit.get_action(u'package_show')(None, {
        u'id': toolkit.get_or_bust(data_dict, u'id')
    })

    # Generate the RDF URI for this dataset, using the very same code used
    # by ckanext-dcat, so it matches what DataOntoSearch saw when it
    # retrieved the dataset RDF.
    rdf_uri = dataset_uri(dataset)

    response = make_tagger_get_request(u'/tag', {u'dataset_id': rdf_uri})
    response.raise_for_status()

    payload = response.json()
    return [] if payload is None else payload[u'concepts']
def dataset_release_update(context, data_dict):
    """Update a release of the current dataset.

    :param dataset: the id or name of the dataset
    :type dataset: string
    :param release: the id of the release
    :type release: string
    :param name: A short name for the release
    :type name: string
    :param description: A description for the release
    :type description: string
    :returns: the edited release
    :rtype: dictionary
    """
    release, name, dataset_name_or_id = toolkit.get_or_bust(
        data_dict, ['release', 'name', 'dataset'])

    # NOTE(review): updates are gated behind the *create* permission —
    # confirm this is intentional.
    toolkit.check_access('dataset_release_create', context, data_dict)
    assert context.get('auth_user_obj')  # Should be here after `check_access`

    backend = get_metastore_backend()
    author = create_author_from_context(context)
    try:
        release_info = backend.tag_update(
            _get_dataset_name(dataset_name_or_id),
            release,
            new_name=name,
            new_description=data_dict.get('description', None),
            author=author)
    except exc.NotFound:
        raise toolkit.ObjectNotFound("Dataset release not found.")

    log.info('Release "%s" with id %s modified successfully', name, release)
    return tag_to_dict(release_info)
def workflow_annotation_delete(context, data_dict):
    """
    Delete a workflow annotation. You must be authorized to delete the
    workflow annotation.

    :param id: the id or name of the workflow annotation to delete
    :type id: string
    """
    log.info("Deleting workflow annotation: %r", data_dict)

    model = context['model']
    user = context['user']
    session = context['session']  # required to be present in context
    defer_commit = context.get('defer_commit', False)

    annotation_id = tk.get_or_bust(data_dict, 'id')
    annotation = ckanext_model.WorkflowAnnotation.get(annotation_id)
    if annotation is None:
        raise tk.ObjectNotFound(
            '%s: %s' % (_('Not found'), _('Workflow Annotation')))
    annotation_id = annotation.id

    tk.check_access('workflow_annotation_delete', context, data_dict)

    # Record the deletion as a new revision before removing the annotation.
    rev = model.repo.new_revision()
    rev.author = user
    rev.message = _(
        u'REST API: Delete workflow annotation %s') % annotation_id

    annotation.delete()
    if not defer_commit:
        model.repo.commit()
def access_request_update(context, data_dict):
    """Authorize/handle an update of an access request.

    For organization/package requests the caller must be an admin of the
    (owning) organization; for user requests the update is delegated to
    external_user_update_state.
    """
    user = context.get('user')
    request_id = toolkit.get_or_bust(data_dict, "id")
    request = model.Session.query(AccessRequest).get(request_id)
    if not request:
        raise toolkit.ObjectNotFound("Access Request not found")
    if request.object_type not in ['organization', 'package', 'user']:
        raise toolkit.Invalid("Unknown Object Type")

    if request.object_type == 'package':
        # Authorization hinges on the dataset's owning organization.
        package = toolkit.get_action('package_show')(context, {
            'id': request.object_id
        })
        is_admin = has_user_permission_for_group_or_org(
            package['owner_org'], user, 'admin')
        return {'success': is_admin}

    if request.object_type == 'organization':
        is_admin = has_user_permission_for_group_or_org(
            request.object_id, user, 'admin')
        return {'success': is_admin}

    # request.object_type == 'user'
    request_type = request.data.get(
        'user_request_type', USER_REQUEST_TYPE_NEW)
    data_dict = {
        'id': request.object_id,
        'renew_expiry_date': request_type == USER_REQUEST_TYPE_RENEWAL
    }
    return external_user_update_state(context, data_dict)
def upload_multipart(context, data_dict):
    """Upload one part of a multipart upload to cloud storage.

    :param uploadId: id of the multipart upload this part belongs to
    :param partNumber: index of this part within the upload
    :param upload: file-like upload object holding the part's content
    :returns: dict with the part number and the ETag reported by the
        storage backend
    :raises ValidationError: if the backend does not answer with HTTP 200
    """
    # NOTE(review): h.check_access is called without `context`, unlike
    # toolkit.check_access elsewhere in this file — confirm this helper's
    # signature.
    h.check_access('cloudstorage_upload_multipart', data_dict)
    upload_id, part_number, part_content = toolkit.get_or_bust(
        data_dict, ['uploadId', 'partNumber', 'upload'])

    uploader = ResourceCloudStorage({})
    upload = model.Session.query(MultipartUpload).get(upload_id)

    # Read the whole part into memory and PUT it to the storage backend.
    data = bytearray(_get_underlying_file(part_content).read())
    resp = uploader.driver.connection.request(
        _get_object_url(uploader, upload.name),
        params={
            'uploadId': upload_id,
            'partNumber': part_number
        },
        method='PUT',
        data=data,
        headers={'Content-Length': len(data)})
    if resp.status != 200:
        raise toolkit.ValidationError('Upload failed: part %s' % part_number)

    # Persist the part's ETag so the upload can be finalized later.
    _save_part_info(part_number, resp.headers['etag'], upload)
    return {'partNumber': part_number, 'ETag': resp.headers['etag']}
def contribution_activity_update(context, data_dict):
    '''
    Updates a
    :class:`~ckanext.attribution.model.contribution_activity.ContributionActivity`
    record, linked to a package and an agent via package_contribution_activity
    and agent_contribution_activity records (respectively). These link records
    are also updated as part of this action, as the activity should not exist
    without the package or agent.

    :param id: ID of the record to update
    :type id: str
    :param activity: short (one/two words) description for the activity
    :type activity: str, optional
    :param scheme: name of the role/activity taxonomy, e.g. credit or datacite
    :type scheme: str, optional
    :param level: lead, equal, or supporting
    :type level: str, optional
    :param time: time activity took place
    :type time: datetime.datetime, optional
    :param context:
    :param data_dict:
    :returns: New contribution activity record.
    :rtype: dict
    '''
    toolkit.check_access('contribution_activity_update', context, data_dict)
    item_id = toolkit.get_or_bust(data_dict, 'id')

    # Validate first, then apply the update with the cleaned fields.
    validated = ContributionActivityQuery.validate(data_dict)
    updated_activity = ContributionActivityQuery.update(item_id, **validated)
    return updated_activity.as_dict()
def inventory_entry_list(context, data_dict):
    '''Return a list of inventory entries.

    :param name: organization name
    :type name: string
    :rtype: list of dictionaries
    '''
    # TODO @palcu: define this
    # check_access('inventory_manage', context, data_dict)
    model = context['model']
    group = model.Group.get(get_or_bust(data_dict, 'name'))
    if not group:
        raise ObjectNotFound('Organization was not found')

    dictized = [table_dictize(e, context) for e in group.inventory_entries]
    for entry in dictized:
        deadline = None
        last_ts = entry['last_added_dataset_timestamp']
        if last_ts:
            # Deadline is the last dataset addition plus the interval.
            deadline = (_datestamp_to_datetime(last_ts) +
                        timedelta(days=entry['recurring_interval']))
        entry['next_deadline_timestamp'] = deadline
    return dictized
def infrastructure_delete(context, data_dict):
    """
    Delete an infrastructure. You must be authorized to delete the
    infrastructure.

    :param id: the id or name of the infrastructure to delete
    :type id: string
    """
    log.info("Deleting infrastructure: %r", data_dict)

    session = context['session']
    model = context['model']
    user = context['user']

    infrastructure_id = tk.get_or_bust(data_dict, 'id')
    infrastructure = model.Group.get(infrastructure_id)
    # Only groups of type 'infrastructure' may be deleted through here.
    if infrastructure is not None and infrastructure.type == 'infrastructure':
        infrastructure_id = infrastructure.id
    else:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('Project')))

    tk.check_access('infrastructure_delete', context, data_dict)

    # Refuse to delete while any non-deleted metadata collection is still a
    # member of this infrastructure group.
    if session.query(model.Member) \
            .join(model.Group, model.Group.id == model.Member.table_id) \
            .filter(model.Member.group_id == infrastructure_id) \
            .filter(model.Member.table_name == 'group') \
            .filter(model.Member.state != 'deleted') \
            .filter(model.Group.type == 'metadata_collection') \
            .filter(model.Group.state != 'deleted') \
            .count() > 0:
        raise tk.ValidationError(
            _('Project has dependent metadata collections'))

    # cascade delete to dependent metadata schemas
    # (commits are deferred so the whole cascade is committed together)
    cascade_context = {
        'model': model,
        'user': user,
        'session': session,
        'defer_commit': True,
        'ignore_auth': True,
    }
    metadata_schema_ids = session.query(ckanext_model.MetadataSchema.id) \
        .filter(ckanext_model.MetadataSchema.infrastructure_id == infrastructure_id) \
        .filter(ckanext_model.MetadataSchema.state != 'deleted') \
        .all()
    for (metadata_schema_id, ) in metadata_schema_ids:
        tk.get_action('metadata_schema_delete')(cascade_context, {
            'id': metadata_schema_id
        })

    # Finally delete the group itself via the core action, tagging the
    # context so downstream hooks know which action triggered the delete.
    data_dict['type'] = 'infrastructure'
    group_context = context.copy()
    group_context.update({
        'invoked_action': 'infrastructure_delete',
        'ignore_auth': True,
    })
    tk.get_action('group_delete')(group_context, data_dict)
def resource_show_revision(context, data_dict):
    """Show a resource from a specified revision

    Takes the same arguments as 'resource_show' but with an additional
    revision_ref parameter

    :param id: the id of the resource
    :type id: string
    :param revision_ref: the ID of the revision or release name
    :type revision_ref: string
    :returns: A resource dict
    :rtype: dict
    """
    revision_ref = _get_revision_ref(data_dict)
    # No revision requested: behave exactly like core resource_show.
    if revision_ref is None:
        return core_resource_show(context, data_dict)

    model = context['model']
    id = toolkit.get_or_bust(data_dict, 'id')
    resource = model.Resource.get(id)
    # Guard against an unknown resource id; previously this crashed with
    # AttributeError when reading .package_id off None.
    if resource is None:
        raise toolkit.ObjectNotFound("Resource not found")

    package = _get_package_in_revision(
        context, {'id': resource.package_id}, revision_ref)
    resource_dict = h.find_resource_in_package(package, id)
    if resource_dict is None:
        raise toolkit.ObjectNotFound(
            "Resource not found for dataset revision")

    return resource_dict
def showcase_admin_remove(context, data_dict):
    '''Remove a user to the list of showcase admins.

    :param username: name of the user to remove from showcase user admin list
    :type username: string
    '''
    model = context['model']
    toolkit.check_access('ckanext_showcase_admin_remove', context, data_dict)

    # validate the incoming data_dict
    validated, errors = validate(
        data_dict, showcase_admin_remove_schema(), context)
    if errors:
        raise toolkit.ValidationError(errors)

    username = toolkit.get_or_bust(validated, 'username')
    user_id = convert_user_name_or_id_to_id(username, context)

    admin_record = ShowcaseAdmin.get(user_id=user_id)
    if admin_record is None:
        raise toolkit.ObjectNotFound(
            "ShowcaseAdmin with user_id '{0}' doesn't exist.".format(user_id))

    admin_record.delete()
    model.repo.commit()
def dataset_revert(context, data_dict):
    """Reverts a dataset to a specified revision or release

    param dataset: the dataset name or ID to be reverted
    type dataset: string
    param revision_ref: the release or revision to revert to
    type revision_ref: string
    """
    dataset_id, revision_ref = toolkit.get_or_bust(
        data_dict, ['dataset', 'revision_ref'])
    toolkit.check_access('dataset_revert', context, data_dict)
    assert context.get('auth_user_obj')  # Should be here after `check_access`

    # Fetch the dataset as it existed at the requested revision, then write
    # that historical state back as the current one.
    historical_dict = toolkit.get_action('package_show')(context, {
        'id': dataset_id,
        'revision_ref': revision_ref
    })
    reverted = toolkit.get_action('package_update')(context, historical_dict)

    log.info('Package %s reverted to revision %s', dataset_id, revision_ref)
    return reverted
def infrastructure_member_delete(context, data_dict):
    """
    Remove a user from an infrastructure. You must be authorized to edit the
    infrastructure.

    :param id: the id or name of the infrastructure
    :type id: string
    :param username: name or id of the user
    :type username: string
    """
    log.info("Deleting a user's membership of an infrastructure: %r",
             data_dict)
    tk.check_access('infrastructure_member_delete', context, data_dict)

    model = context['model']
    group = model.Group.get(tk.get_or_bust(data_dict, 'id'))
    username = data_dict.get('username') or data_dict.get('user_id')
    # Only groups of type 'infrastructure' are valid targets.
    if group is None or group.type != 'infrastructure':
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('Project')))

    # Delegate to the core member_delete action, skipping its auth check
    # since authorization was already performed above.
    member_context = context.copy()
    member_context['ignore_auth'] = True
    return tk.get_action('member_delete')(member_context, {
        'id': group.id,
        'object': username,
        'object_type': 'user',
    })
def version_show(context, data_dict):
    """Show a specific version object

    :param version_id: the id or name of the version
    :type version_id: string
    :param dataset_id: [Optional] the id or name of a dataset. Mandatory if
        version name provided as version_id
    :type dataset_id: string
    :returns: the version dictionary
    :rtype: dict
    """
    model = context.get('model', core_model)
    ref = toolkit.get_or_bust(data_dict, ['version_id'])

    # First try the reference as a primary key ...
    version = model.Session.query(Version).get(ref)
    if not version:
        # ... then fall back to treating it as a version *name*, which
        # needs a dataset to disambiguate.
        dataset = model.Package.get(data_dict.get('dataset_id'))
        if dataset:
            version = model.Session.query(Version) \
                .filter(Version.package_id == dataset.id) \
                .filter(Version.name == ref) \
                .one_or_none()
        if not version:
            raise toolkit.ObjectNotFound('Version not found')

    toolkit.check_access('version_show', context,
                         {"package_id": version.package_id})

    return version.as_dict()
def dataset_release_diff(context, data_dict):
    '''Returns a diff between two dataset releases

    :param id: the id of the dataset
    :type id: string
    :param revision_ref_1: the id of the first release to compare
    :type revision_ref_1: string
    :param revision_ref_2: the id of the second release to compare
    :type revision_ref_2: string
    :param diff_type: 'unified', 'context', 'html'
    :type diff_type: string
    '''
    dataset_id, revision_ref_1, revision_ref_2 = toolkit.get_or_bust(
        data_dict, ['id', 'revision_ref_1', 'revision_ref_2'])
    diff_type = data_dict.get('diff_type', 'unified')

    toolkit.check_access(u'dataset_release_diff', context,
                         {'name_or_id': dataset_id})

    # Materialize both release states, then diff them.
    revision_1 = _get_dataset_revision_dict(context, dataset_id,
                                            revision_ref_1)
    revision_2 = _get_dataset_revision_dict(context, dataset_id,
                                            revision_ref_2)
    diff = _generate_diff(revision_1, revision_2, diff_type)

    return {
        'diff': diff,
        'dataset_dict_1': revision_1,
        'dataset_dict_2': revision_2,
    }
def showcase_package_association_delete(context, data_dict):
    '''Delete an association between a showcase and a package.

    :param showcase_id: id or name of the showcase in the association
    :type showcase_id: string
    :param package_id: id or name of the package in the association
    :type package_id: string
    '''
    model = context['model']
    toolkit.check_access(
        'ckanext_showcase_package_association_delete', context, data_dict)

    # validate the incoming data_dict
    validated, errors = validate(
        data_dict, showcase_package_association_delete_schema(), context)
    if errors:
        raise toolkit.ValidationError(errors)

    package_id, showcase_id = toolkit.get_or_bust(
        validated, ['package_id', 'showcase_id'])

    association = ShowcasePackageAssociation.get(
        package_id=package_id, showcase_id=showcase_id)
    if association is None:
        raise toolkit.ObjectNotFound(
            "ShowcasePackageAssociation with package_id '{0}' and "
            "showcase_id '{1}' doesn't exist.".format(
                package_id, showcase_id))

    # delete the association
    association.delete()
    model.repo.commit()
def validate(cls, data_dict):
    """Validate an agent data_dict against the per-agent-type field rules.

    :param data_dict: agent fields; must include 'agent_type' (one of
        'person', 'org', 'other') plus that type's required fields
    :returns: the validated data_dict
    :raises toolkit.Invalid: if agent_type is not recognised
    :raises toolkit.ValidationError: if a required field is missing
    """
    data_dict = super(AgentQuery, cls).validate(data_dict)
    valid_agent_types = ['person', 'org', 'other']
    agent_type = toolkit.get_or_bust(data_dict, 'agent_type')
    if agent_type not in valid_agent_types:
        raise toolkit.Invalid('Agent type must be one of {0}'.format(
            ', '.join(valid_agent_types)))

    # Required/optional fields differ per agent type.
    valid_params = {
        'person': dict(required=['family_name', 'given_names'],
                       optional=['given_names_first']),
        'org': dict(required=['name'], optional=['location']),
        'other': dict(required=[], optional=[])
    }
    required = ['agent_type'] + valid_params[agent_type]['required']
    optional = ['user_id', 'external_id', 'external_id_scheme'
                ] + valid_params[agent_type]['optional']
    for k in required:
        if k not in data_dict:
            raise toolkit.ValidationError(
                '{0} is a required field.'.format(k))
    # external_id and external_id_scheme must travel together.
    if 'external_id' in data_dict and 'external_id_scheme' not in data_dict:
        raise toolkit.ValidationError(
            'external_id_scheme is a required field when external_id is set.'
        )
    all_fields = required + optional
    # NOTE(review): unrecognised keys are nulled out rather than removed —
    # confirm downstream code expects these keys present with value None.
    for k in data_dict:
        if k not in all_fields:
            data_dict[k] = None
    return data_dict
def jsonpatch_delete(context, data_dict):
    """
    Delete a JSON Patch.

    :param id: the id of the JSON Patch to delete
    :type id: string
    """
    log.info("Deleting JSON Patch: %r", data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)

    patch_id = tk.get_or_bust(data_dict, 'id')
    jsonpatch = JSONPatch.get(patch_id)
    if jsonpatch is None:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('JSON Patch')))
    # Normalize to the canonical id in case a name/alias was supplied.
    patch_id = jsonpatch.id

    tk.check_access('jsonpatch_delete', context, data_dict)

    # Record the deletion in a new revision before removing the patch.
    rev = model.repo.new_revision()
    rev.author = user
    rev.message = _(u'REST API: Delete JSON Patch %s') % patch_id

    jsonpatch.delete()
    if not defer_commit:
        model.repo.commit()
def jsonpatch_show(context, data_dict):
    """
    Return a JSON Patch definition.

    The structure of the returned dictionary may be customized by
    passing 'schema' in the context.

    :param id: the id of the JSON Patch
    :type id: string

    :rtype: dictionary
    """
    log.debug("Retrieving JSON Patch: %r", data_dict)

    patch_id = tk.get_or_bust(data_dict, 'id')
    jsonpatch = JSONPatch.get(patch_id)
    if jsonpatch is None:
        raise tk.ObjectNotFound('%s: %s' % (_('Not found'), _('JSON Patch')))

    tk.check_access('jsonpatch_show', context, data_dict)

    # Make the model object available to the dictize/validation layers.
    context['jsonpatch'] = jsonpatch
    patch_dict = jsonpatch_dictize(jsonpatch, context)

    # A caller-supplied schema (via context) overrides the default one.
    output_schema = context.get('schema') or schema.jsonpatch_show_schema()
    result, errors = tk.navl_validate(patch_dict, output_schema, context)
    return result
def resource_history(context, data_dict):
    '''Get an array with all the versions of the resource.

    Each resource dict in the returned array carries an extra ``version``
    key holding the version dictionary for the matching activity.

    :param resource_id: the id of the resource
    :type resource_id: string
    :returns array of resources
    :rtype array
    '''
    resource_id = toolkit.get_or_bust(data_dict, ['resource_id'])

    versions = resource_version_list(
        {'model': core_model, 'user': context['user']},
        {'resource_id': resource_id})

    history = []
    for ver in versions:
        # Fetch the resource as it looked at this activity point.
        res = activity_resource_show(
            {'user': context['user']},
            {'activity_id': ver['activity_id'],
             'resource_id': ver['resource_id']})
        res['version'] = ver
        history.append(res)
    return history
def user_create(context, data_dict=None, original_action=None):
    """Perform extra user-create actions for the LoopBack server.

    Mirrors a newly created CKAN user into the LoopBack user store.

    :param data_dict: the data_dict given to the core ``user_create``
        action; must contain ``password1``
    :param original_action: the dict returned by the core ``user_create``
        action (provides id, name, email)
    :raises toolkit.ValidationError: if ``original_action`` is not given
    """
    if original_action is None:
        # py3-compatible raise (the old comma-style raise statement is
        # Python 2 only); error-message typo fixed as well.
        raise toolkit.ValidationError("Original action not provided")

    user_info = {
        'id': original_action['id'],
        'username': original_action['name'],
        'email': original_action['email'],
        'apikey': original_action['id'],
        'password': toolkit.get_or_bust(data_dict, 'password1'),
    }

    # Make sure we hold a LoopBack access token before calling the API.
    if pylons.config.get('loopback_token') is None:
        loopback_login()

    def _post_user():
        # Re-read the token each attempt so a fresh login is picked up.
        request_url = '{}?access_token={}'.format(
            pylons.config.get('ckan.loopback.user_url'),
            pylons.config.get('loopback_token'))
        return requests.post(request_url, data=user_info)

    response = _post_user()
    if response.status_code == 401:
        # Token expired: log in again and retry once. Previously the 401
        # branch re-authenticated but never re-sent the request, so the
        # user was silently never created on the LoopBack side.
        loopback_login()
        response = _post_user()
    response.raise_for_status()

    log.debug('LoopBack user created: {}'.format(user_info['id']))
def metadata_json_attr_map_delete(context, data_dict):
    """
    Delete a metadata JSON attribute map.

    You must be authorized to delete the metadata JSON attribute map.

    :param id: the id or name of the metadata JSON attribute map to delete
    :type id: string
    """
    log.info("Deleting metadata JSON attribute map: %r", data_dict)

    model = context['model']
    user = context['user']
    session = context['session']
    defer_commit = context.get('defer_commit', False)

    map_id = tk.get_or_bust(data_dict, 'id')
    attr_map = ckanext_model.MetadataJSONAttrMap.get(map_id)
    if attr_map is None:
        raise tk.ObjectNotFound(
            '%s: %s' % (_('Not found'), _('Metadata JSON Attribute Map')))
    # Normalize to the canonical id in case a name was supplied.
    map_id = attr_map.id

    tk.check_access('metadata_json_attr_map_delete', context, data_dict)

    # Record the deletion in a new revision before removing the row.
    rev = model.repo.new_revision()
    rev.author = user
    rev.message = _(u'REST API: Delete metadata JSON attribute map %s'
                    ) % map_id

    attr_map.delete()
    if not defer_commit:
        model.repo.commit()
def resource_version_list(context, data_dict):
    """List versions of a given resource.

    :param resource_id: the id of the resource
    :type resource_id: string
    :returns: list of matched versions
    :rtype: list
    :raises toolkit.ObjectNotFound: if the resource does not exist, or it
        has no versions
    """
    model = context.get('model', core_model)
    resource_id = toolkit.get_or_bust(data_dict, ['resource_id'])
    resource = model.Resource.get(resource_id)
    if not resource:
        raise toolkit.ObjectNotFound('Resource not found')

    toolkit.check_access('version_list', context,
                         {"package_id": resource.package_id})

    # Materialize the query with .all(): a SQLAlchemy Query object is
    # always truthy, so the previous `if not versions` check could never
    # detect an empty result and the ObjectNotFound below never fired.
    versions = model.Session.query(Version).\
        filter(Version.resource_id == resource.id).\
        order_by(Version.created.desc()).\
        all()

    if not versions:
        raise toolkit.ObjectNotFound('Versions not found for this resource')

    return [v.as_dict() for v in versions]
def showcase_admin_add(context, data_dict):
    '''Add a user to the list of showcase admins.

    :param username: name of the user to add to showcase user admin list
    :type username: string
    '''
    toolkit.check_access('ckanext_showcase_admin_add', context, data_dict)

    # Validate the incoming data_dict.
    checked_dict, errors = validate(
        data_dict, showcase_admin_add_schema(), context)

    # Resolve the username first; an unknown user is a 404, not a
    # validation error.
    username = toolkit.get_or_bust(checked_dict, 'username')
    try:
        user_id = convert_user_name_or_id_to_id(username, context)
    except toolkit.Invalid:
        raise toolkit.ObjectNotFound

    if errors:
        raise toolkit.ValidationError(errors)

    if ShowcaseAdmin.exists(user_id=user_id):
        raise toolkit.ValidationError(
            "ShowcaseAdmin with user_id '{0}' already exists.".format(
                user_id),
            error_summary=u"User '{0}' is already a Showcase Admin.".format(
                username))

    # Create the showcase admin entry.
    return ShowcaseAdmin.create(user_id=user_id)
def package_add_source(context, data_dict):
    '''Add a source to a package.

    (Docstring fixed: it previously claimed to *list* sources.)

    If ``source_link`` is not an http(s) URL, it is treated as the name or
    id of another dataset; that dataset's id and title are recorded as the
    source instead.

    :param package_id: id or name of the package
    :type package_id: string
    :param source_link: URL of the source, or name/id of a source dataset
    :type source_link: string
    :param source_title: title of the source
    :type source_title: string

    :returns: the created DatasetSourceModel record
    '''
    toolkit.check_access('package_update', context, data_dict)

    # Validate the incoming data_dict.
    validated_data_dict, errors = validate(data_dict,
                                           package_add_source_schema(),
                                           context)
    if errors:
        raise toolkit.ValidationError(errors)

    package_id, source_link, source_title = toolkit.get_or_bust(
        validated_data_dict, ['package_id', 'source_link', 'source_title'])

    if not source_link.startswith('http'):
        # The link is a dataset reference, not a URL: resolve it and link
        # to the referenced dataset's id/title instead.
        source_dict = toolkit.get_action('package_show')(
            context, {'name_or_id': source_link})
        return DatasetSourceModel.create(package_id=package_id,
                                         source_link=source_dict['id'],
                                         source_title=source_dict['title'])

    return DatasetSourceModel.create(package_id=package_id,
                                     source_link=source_link,
                                     source_title=source_title)
def extra_autocomplete(context, data_dict):
    '''Suggest values of a package extra via a package_search query.

    :param extra: name of the extra field to search on
    :type extra: string
    :param search_term: term to match (defaults to ``*``)
    :type search_term: string
    :returns: the extra's value from each matching dataset ('' if absent)
    :rtype: list of strings
    '''
    field = toolkit.get_or_bust(data_dict, 'extra')
    term = data_dict.get('search_term', '*')
    # Extras are indexed under an 'extras_' prefix in the search index.
    query = 'extras_{0}:{1}'.format(field, term)
    search_result = toolkit.get_action('package_search')(context,
                                                         {'q': query})
    values = []
    for pkg in search_result.get('results', []):
        values.append(pkg.get(field, ''))
    return values
def scheming_organization_schema_show(context, data_dict):
    '''
    Return the scheming schema for a given organization type

    :param type: the organization type
    :param expanded: True to expand presets (default)
    '''
    # Accept an optional 'expanded' flag for consistency with the other
    # variant of this action in the codebase; defaulting to True keeps
    # the previous behavior for existing callers.
    t = get_or_bust(data_dict, 'type')
    expanded = data_dict.get('expanded', True)
    s = scheming_get_organization_schema(t, expanded)
    if s is None:
        raise ObjectNotFound()
    return s
def osed_content_headers(context, data_dict):
    '''
    Return selected headers of a remote resource.

    :param url: URL of the remote resource
    :returns: dict with status_code, content-length and content-type
    '''
    target_url = get_or_bust(data_dict, 'url')
    resp = get_content_headers(target_url)
    headers = resp.headers
    return {
        'status_code': resp.status_code,
        'content-length': headers.get('content-length', ''),
        'content-type': headers.get('content-type', ''),
    }
def osed_dataset_by_identifier(context, data_dict):
    '''Look up a single dataset by its ``identifier`` field.

    :param identifier: the identifier value to search for
    :returns: the first matching dataset dict
    :raises NotFound: if no dataset matches
    '''
    # Run the search as the site user so visibility is not restricted.
    site_user = tk.get_action('get_site_user')({'ignore_auth': True}, {})
    context.update({'user': site_user['name']})

    identifier = get_or_bust(data_dict, 'identifier')
    search_result = tk.get_action('package_search')(
        context, {'fq': 'identifier:%s' % identifier})
    try:
        return search_result['results'][0]
    except (KeyError, IndexError, TypeError):
        raise NotFound
def activity_list_from_user_since(context, data_dict):
    '''Return the activity stream of all recently added or changed packages.

    :param since_time: starting date/time

    Limited to 31 records (configurable via the
    ckan.activity_list_hard_limit setting) but may be called repeatedly
    with the timestamp of the last record to collect all activities.

    :param user_id: the user of the requested activity list

    :rtype: list of dictionaries
    '''
    since = get_or_bust(data_dict, 'since_time')
    user_id = get_or_bust(data_dict, 'user_id')
    try:
        since_time = isodate(since, None)
    except Invalid as e:
        # py3-compatible except clause (was the Python-2-only
        # `except Invalid, e` form).
        raise ValidationError({'since_time': e.error})
    # NOTE(review): the function parses its inputs but never builds or
    # returns the activity list the docstring promises -- the visible
    # implementation appears to be unfinished; confirm against the
    # original project before relying on it.
def inventory_entry_csv_single(context, data_dict):
    """Return CSV-row tuples for the inventory entries of one organization.

    :param name: name (or id) of the organization
    :type name: string
    :returns: list of (title, recurring_interval,
        last_added_dataset_timestamp) tuples
    :raises ObjectNotFound: if the organization does not exist
    """
    model = context['model']
    name = get_or_bust(data_dict, 'name')
    organization = model.Group.get(name)
    if not organization:
        raise ObjectNotFound('Organization was not found')

    # Previously this built a dictized list that was never used, then ran
    # an unfiltered query joining every InventoryEntry with model.Group --
    # returning rows for ALL organizations despite this being the
    # "single" lookup. Use the organization's own entries instead.
    return [(entry.title,
             entry.recurring_interval,
             entry.last_added_dataset_timestamp)
            for entry in organization.inventory_entries]
def scheming_organization_schema_show(context, data_dict):
    '''
    Return the scheming schema for a given organization type

    :param type: the organization type
    :param expanded: True to expand presets (default)
    '''
    org_type = get_or_bust(data_dict, 'type')
    expand_presets = data_dict.get('expanded', True)
    org_schema = scheming_get_organization_schema(org_type, expand_presets)
    if org_schema is None:
        raise ObjectNotFound()
    return org_schema
def user_delete(context, data_dict=None):
    """Extra user-delete actions: remove the user from the chat server.

    :param id: id (or name) of the CKAN user being deleted
    :type id: string
    :raises toolkit.ObjectNotFound: if the user does not exist
    """
    model = context['model']
    user_id = toolkit.get_or_bust(data_dict, 'id')
    delete_user = model.User.get(user_id)
    if delete_user is None:
        # Previously a missing user crashed with AttributeError on
        # `delete_user.name` below; fail with a proper 404 instead.
        raise toolkit.ObjectNotFound('User was not found')

    mysql_engine = create_engine(chat_connect(), pool_recycle=3600)
    metadata = MetaData()
    users = Table('users', metadata,
                  Column('userid', Integer),
                  Column('name', String))
    # Ensure the table exists before issuing the delete.
    metadata.create_all(mysql_engine)

    # Use the connection as a context manager so it is always returned
    # to the pool -- the previous code never closed it (connection leak).
    with mysql_engine.connect() as connection:
        delete = users.delete().where(users.c.name == str(delete_user.name))
        connection.execute(delete)
def activate_user(context, data_dict):
    """Activate a user account and email them a password-reset link.

    Sysadmin only.

    :param id: id or name of the user to activate
    :type id: string
    :raises ObjectNotFound: if no such user exists
    """
    check_access('sysadmin', context, {})
    model = context['model']
    id = get_or_bust(data_dict, 'id')
    user_obj = model.User.get(id)
    if not user_obj:
        raise ObjectNotFound('User was not found')

    user_obj.activate()
    user_obj.save()

    # Sending the reset link is best-effort: surface a failure to the
    # operator via a flash message rather than aborting the activation.
    try:
        send_reset_link(user_obj)
    except Exception as e:
        # py3-compatible except clause (was `except Exception, e`), and
        # py2-only `unicode(e)` replaced by letting %-formatting convert.
        h.flash_error(_('Could not send reset link: %s') % e)
def organization_by_inventory_id(context, data_dict):
    """Return the organization whose 'inventory_organization_id' group
    extra matches the given id.

    :param id: the inventory organization id
    :type id: string
    :rtype: dictionary
    :raises ObjectNotFound: if no matching group extra or organization
        exists
    """
    model = context['model']
    id = get_or_bust(data_dict, 'id')

    group_extra = model.meta.Session.query(model.GroupExtra) \
        .filter_by(key='inventory_organization_id') \
        .filter_by(value=id).first()
    if group_extra is None:
        # Typo fixed in message: 'specificied' -> 'specified'.
        raise ObjectNotFound('No GroupExtra with specified inventory id')

    organization = model.meta.Session.query(model.Group) \
        .filter_by(id=group_extra.group_id).first()
    if organization is None:
        raise ObjectNotFound('No organization with specified inventory id')

    return model_dictize.group_dictize(organization, context)
def showcase_delete(context, data_dict):
    '''Delete a showcase. Showcase delete cascades to
    ShowcasePackageAssociation objects.

    :param id: the id or name of the showcase to delete
    :type id: string
    '''
    model = context['model']
    showcase_id = toolkit.get_or_bust(data_dict, 'id')

    showcase = model.Package.get(showcase_id)
    if showcase is None:
        raise toolkit.ObjectNotFound

    toolkit.check_access('ckanext_showcase_delete', context, data_dict)

    # Purge removes the showcase (stored as a Package) entirely rather
    # than soft-deleting it.
    showcase.purge()
    model.repo.commit()
def osed_dataset_terms_of_use(context, data_dict):
    '''
    Returns the terms of use for the requested dataset.

    By definition the terms of use of a dataset corresponds to the least
    open rights statement of all distributions of the dataset.
    '''
    # Ordered from most open to least open; any unrecognized statement is
    # treated as closed data.
    terms = [
        'NonCommercialAllowed-CommercialAllowed-ReferenceNotRequired',
        'NonCommercialAllowed-CommercialAllowed-ReferenceRequired',
        'NonCommercialAllowed-CommercialWithPermission-ReferenceNotRequired',
        'NonCommercialAllowed-CommercialWithPermission-ReferenceRequired',
        'ClosedData',
    ]
    site_user = tk.get_action('get_site_user')({'ignore_auth': True}, {})
    req_context = {'user': site_user['name']}
    pkg_id = get_or_bust(data_dict, 'id')
    pkg = tk.get_action('package_show')(req_context, {'id': pkg_id})

    least_open = None
    for res in pkg['resources']:
        if 'rights' not in res:
            # Resources with no rights statement don't affect the result
            # (the original swallowed the KeyError).
            continue
        rights = res['rights']
        if rights not in terms:
            # Unknown statement: the dataset is closed; stop looking.
            least_open = 'ClosedData'
            break
        if least_open is None or terms.index(rights) > terms.index(least_open):
            least_open = rights

    if least_open is None:
        least_open = 'ClosedData'
    return {'dataset_rights': least_open}
def dcat_validation(context, data_dict):
    '''
    Return the validation errors for the last harvest job of the
    organization harvest source.

    :param id: name or id of the organization
    :type id: string
    :returns: a dict with the harvest source url, the timestamp of the
        last validation and a result dict (error/warning counts plus the
        parsed gather-error messages), or None when the organization has
        no harvest source
    :rtype: dictionary or None
    :raises toolkit.ObjectNotFound: if the organization cannot be resolved
    '''
    toolkit.check_access('dcat_validation', context, data_dict)
    org_id = toolkit.get_or_bust(data_dict, 'id')
    # Resolve the organization name/id; an unknown organization is a 404.
    try:
        id = converters.convert_group_name_or_id_to_id(org_id, context)
    except toolkit.Invalid:
        raise toolkit.ObjectNotFound
    harvest_list = _harvest_list_for_org(context, id)
    if harvest_list:
        # Only the first harvest source of the organization is examined.
        harvest_package = harvest_list[0]
        harvest_source_id = harvest_package.get('id', '')
        source_status = toolkit.get_action('harvest_source_show_status')(
            context=context, data_dict={'id': harvest_source_id})
        return_obj = {
            'url': harvest_package['url'],
            'last_validation': None,
            'result': None,
        }
        last_job = source_status.get('last_job', None)
        if last_job:
            return_obj['last_validation'] = last_job['gather_finished']
            # The job report is fetched with ignore_auth: the caller was
            # already authorized by the dcat_validation check above.
            last_job_report = toolkit.get_action('harvest_job_report')(
                context={'ignore_auth': True},
                data_dict={'id': last_job['id']})
            gather_errors = last_job_report.get('gather_errors', [])
            gather_errors_list = []
            error_count = 0
            warning_count = 0
            for error in gather_errors:
                # Gather-error messages are expected to be JSON objects
                # with 'errors'/'warnings' lists; plain-text messages
                # indicate a more fundamental failure.
                try:
                    message = json.loads(error.get('message'))
                except ValueError:
                    # If the error can't be parsed it is normal text (not a
                    # json object) which means its a more fundamental error
                    # We therefore increment the error count
                    error_count += 1
                    message = error.get('message', '')
                else:
                    if message.get('errors') and type(message.get('errors')) is list:
                        error_count += len(message.get('errors'))
                    if message.get('warnings') and type(message.get('warnings')) is list:
                        warning_count += len(message.get('warnings'))
                gather_errors_list.append(message)
            result_obj = {
                'errors': error_count,
                'warnings': warning_count,
                'resources': gather_errors_list
            }
            return_obj['result'] = result_obj
        return return_obj
    else:
        # No harvest source configured for this organization.
        return None