def update_field_index_validator(key, data, errors, context):
    index = data[key]

    try:
        index = int(index)
    except (ValueError, TypeError):
        raise toolkit.Invalid(toolkit._("index must be an int"))

    if index < 0:
        raise toolkit.Invalid(toolkit._("You can't have a negative index"))

    # Make sure the resource has a field with this index.
    resource_id = data[("resource_id",)]
    # We're assuming that resource_id has already been validated and is valid,
    # so resource_schema_show() won't raise an exception here.
    schema = toolkit.get_action("resource_schema_show")(
        context, {"resource_id": resource_id})
    matching_fields = []
    for field in schema.get("fields", []):
        if field["index"] == index:
            matching_fields.append(field)
    if len(matching_fields) == 0:
        raise toolkit.Invalid(toolkit._("There's no field with the given "
                                        "index"))
    if len(matching_fields) > 1:
        raise toolkit.Invalid(
            toolkit._("There's more than one field with the "
                      "given index (this shouldn't happen, "
                      "something has gone wrong)"))

    data[key] = index
def harvest_source_update(context, data_dict):
    """
    Authorization check for harvest source update.

    It forwards the checks to package_update, which will check for
    organization membership, whether the user is a sysadmin, etc., according
    to the instance configuration.
    """
    model = context.get("model")
    user = context.get("user")
    source_id = data_dict["id"]

    pkg = model.Package.get(source_id)
    if not pkg:
        raise pt.ObjectNotFound(pt._("Harvest source not found"))

    context["package"] = pkg

    try:
        pt.check_access("package_update", context, data_dict)
        return {"success": True}
    except pt.NotAuthorized:
        return {
            "success": False,
            "msg": pt._("User {0} not authorized to update harvest source {1}").format(user, source_id),
        }
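# Illustrative sketch (not from the original source): an auth function like
# harvest_source_update is typically exposed through CKAN's IAuthFunctions
# interface; the plugin class name here is hypothetical.
import ckan.plugins as plugins


class ExampleHarvestAuthPlugin(plugins.SingletonPlugin):
    plugins.implements(plugins.IAuthFunctions)

    def get_auth_functions(self):
        # Map the auth function name used by check_access() to the callable above.
        return {'harvest_source_update': harvest_source_update}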
def _validate_resources_tobe_uploaded(self):
    """
    Iterates through the resource file upload fields and checks whether each
    uploaded resource is referenced by the package. Raises an exception if
    it is not.
    """
    log.debug("validating the resources to be uploaded.")
    upload_field_list = []
    for col_index, title in self.upload_file_pos:
        upload_field_list.extend(
            filter(bool, self.xl_data.sheet.col_values(
                col_index, start_rowx=self.first_record_index)))
    upload_field_list = list(set(upload_field_list))
    if self._resource_list:
        for resource in self._resource_list:
            if resource not in upload_field_list:
                raise Exception(toolkit._("Uploaded resource %s is not referenced against any dataset.") % resource)
        if len(upload_field_list) > len(self._resource_list):
            raise Exception(toolkit._("Referenced resources are not uploaded."))
    else:
        if len(upload_field_list) > 0:
            raise Exception(toolkit._("Referenced resources are not uploaded."))
def _save_new(self, context):
    model = context['model']
    context['ignore_auth'] = True
    try:
        data_dict = logic.clean_dict(unflatten(
            logic.tuplize_dict(logic.parse_params(request.params))))
        context['message'] = data_dict.get('log_message', '')
        captcha.check_recaptcha(request)

        organization = get_action('inventory_organization_by_inventory_id')(
            context, {'id': data_dict['inventory_organization_id']})

        password = str(random.SystemRandom().random())
        data_dict['password1'] = password
        data_dict['password2'] = password
        data_dict['state'] = model.State.PENDING

        user = get_action('user_create')(context, data_dict)
        data_dict = {
            'id': organization['id'],
            'role': 'editor',
            'username': user['name']
        }
        logic.get_action('organization_member_create')(context, data_dict)
    except NotAuthorized:
        abort(401, _('Unauthorized to create user %s') % '')
    except NotFound as e:
        abort(404, _('User or organization not found'))
def validator(key, data, errors, context):
    # if there was an error before calling our validator
    # don't bother with our validation
    if errors[key]:
        return

    codeset_type = field['codeset_type']
    codeset_choices = h.codeset_choices(codeset_type)

    value = _data_lookup(key, data)
    if value is missing:
        value = []
    elif isinstance(value, basestring):
        value = [value]
    elif isinstance(value, list):
        for element in value:
            if not isinstance(element, basestring):
                errors[key].append(_('expecting list of strings'))
                return
    else:
        errors[key].append(_('expecting list of strings'))
        return

    selected = set()  # store in a set to eliminate duplicates
    for element in value:
        if element in codeset_choices:
            selected.add(element)
        else:
            errors[key].append(_('unexpected choice "%s"') % element)

    if not errors[key]:
        result = json.dumps(list(selected))
        data[key] = result
def _loadclientconfig(self, config_path):
    """
    Loads the client config file, which contains the details about the
    server and the API key for accessing the server functions.
    """
    import ConfigParser
    import os
    import urlparse

    if os.path.exists(config_path):
        cfgparser = ConfigParser.SafeConfigParser()
        cfgparser.readfp(open(config_path))
        section = 'app:client'
        if cfgparser.has_section(section):
            self.url = cfgparser.get(section, 'api_url', '')
            if self.url == '':
                # The API URL is empty, so we can't proceed any further.
                raise Exception(toolkit._("Unable to find API URL or URL is empty"))
            self.parsed = urlparse.urlparse(self.url)
            newparsed = list(self.parsed)
            self.netloc = self.parsed.netloc
            section = 'index:%s' % self.netloc
            if cfgparser.has_section(section):
                self.api_key = cfgparser.get(section, 'api_key', '')
                if self.api_key == '':
                    raise Exception(toolkit._("Unable to find API key or API key is empty."))
        else:
            raise Exception(toolkit._("Unable to find API URL or URL is empty"))
    else:
        raise Exception(toolkit._("Unable to find the client config file."))
def validate_responsible_party(cls, field_name, email_string):
    """
    Validates whether the given responsible party details already exist in
    the system (authors, maintainer and distributor are considered
    responsible parties). Authors is a multi-valued field, so values are
    separated by commas.

    Raises a validation exception if a party doesn't exist; otherwise
    returns the responsible party details as JSON.
    """
    from ckanext.ngds.env import ckan_model

    email_list = [x.strip() for x in str(email_string).split(',') if x.strip()]
    if len(email_list) > 1 and field_name.lower() != 'authors':
        raise Exception(toolkit._("Data Error: %s can not have more than one person") % field_name)

    party_list = []
    for email in email_list:
        returned_party = ckan_model.ResponsibleParty.find(email.lower()).all()
        if returned_party and len(returned_party) > 0:
            user_dict = {}
            user_dict['name'] = returned_party[0].name
            user_dict['email'] = returned_party[0].email
            party_list.append(user_dict)
        else:
            raise Exception(toolkit._("Responsible party with email: %s not found in the system. Please add either manually or use loader script.") % email)

    import json
    if field_name.lower() == 'authors':
        return json.dumps(party_list)
    else:
        return json.dumps(party_list[0])
def validate_unstructured_resource(data):
    errors = []

    # A bunch of validations for the unstructured resource form
    if 'url' not in data or len(data['url']) < 3:
        errors.append({
            'field': 'url',
            'message': toolkit._('Resource URL is a mandatory parameter')
        })

    if 'name' not in data or len(data['name']) == 0:
        errors.append({
            'field': 'name',
            'message': toolkit._('Name must be non-empty')
        })

    if len(errors) > 0:
        return {
            'success': False,
            'display': toolkit._('Validation Errors'),
            'type': 'resource_form_validation_error',
            'messages': errors
        }
    else:
        return {
            'success': True
        }
def validate_offline_resource(data):
    errors = []

    # A bunch of validations for the offline resource form
    if 'name' not in data or len(data['name']) == 0:
        errors.append({
            'field': 'name',
            'message': toolkit._('Name must be non-empty')
        })

    if 'ordering_procedure' not in data or len(data['ordering_procedure']) == 0:
        errors.append({
            'field': 'ordering_procedure',
            'message': toolkit._('Ordering Procedure must be non-empty')
        })

    if len(errors) > 0:
        return {
            'display': toolkit._('Validation Errors'),
            'type': 'resource_form_validation_error',
            'success': False,
            'messages': errors
        }
    else:
        return {
            'success': True
        }
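# Illustrative usage sketch (not from the original source): how the two form
# validators above might be called with submitted form data; the field values
# are hypothetical.
ok = validate_offline_resource({'name': 'Printed map archive',
                                'ordering_procedure': 'Email the data office'})
bad = validate_unstructured_resource({'url': '', 'name': 'Broken resource'})
assert ok['success'] is True
assert bad['success'] is False   # URL is missing or shorter than 3 characters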
def dump(self, resource_id):
    try:
        offset = int_validator(request.GET.get('offset', 0), {})
    except Invalid as e:
        abort(400, u'offset: ' + e.error)
    try:
        limit = int_validator(request.GET.get('limit'), {})
    except Invalid as e:
        abort(400, u'limit: ' + e.error)

    bom = boolean_validator(request.GET.get('bom'), {})
    fmt = request.GET.get('format', 'csv')

    if fmt not in DUMP_FORMATS:
        abort(400, _(
            u'format: must be one of %s') % u', '.join(DUMP_FORMATS))

    try:
        dump_to(
            resource_id,
            response,
            fmt=fmt,
            offset=offset,
            limit=limit,
            options={u'bom': bom})
    except ObjectNotFound:
        abort(404, _('DataStore resource not found'))
def validator(key, data, errors, context):
    # if there was an error before calling our validator
    # don't bother with our validation
    if errors[key]:
        return

    value = data[key]
    if value is not missing:
        if isinstance(value, basestring):
            value = [value]
        elif not isinstance(value, list):
            errors[key].append(_('expecting list of strings'))
            return
    else:
        value = []

    selected = set()
    for element in value:
        if element in choice_values:
            selected.add(element)
            continue
        errors[key].append(_('unexpected choice "%s"') % element)

    if not errors[key]:
        data[key] = json.dumps([
            c['value'] for c in field['choices'] if c['value'] in selected])

        if field.get('required') and not selected:
            errors[key].append(_('Select at least one'))
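# Illustrative sketch (not from the original source): the validator above reads
# `field` and `choice_values` from an enclosing scope, so it is presumably
# produced by a per-field factory roughly like this; the names below are
# assumptions.
def build_multiple_choice_validator(field):
    choice_values = set(choice['value'] for choice in field['choices'])

    def validator(key, data, errors, context):
        # ... body exactly as in the snippet above ...
        pass

    return validator


# Hypothetical wiring for a single schema field:
example_field = {'choices': [{'value': 'en'}, {'value': 'fr'}], 'required': True}
example_validator = build_multiple_choice_validator(example_field)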
def __init__(self, package_id, resource_id, workspace_name, layer_name,
             layer_version, username, geoserver, store=None, workspace=None,
             lat_field=None, lng_field=None):
    self.geoserver = Geoserver.from_ckan_config()
    self.name = layer_name
    self.layer_version = layer_version
    self.username = username
    self.file_resource = toolkit.get_action("resource_show")(None, {"id": resource_id})
    self.package_id = package_id
    self.resource_id = resource_id
    self.store = self.geoserver.get_datastore(workspace, store, workspace_name, layer_version)
    self.workspace_name = workspace_name

    url = self.file_resource["url"]
    kwargs = {"resource_id": self.file_resource["id"]}

    # Determine whether to handle the data with shapefile or datastored csv operators
    if url.endswith('.zip'):
        cls = Shapefile
    elif url.endswith('.csv'):
        cls = Datastored
        kwargs.update({
            "lat_field": lat_field,
            "lng_field": lng_field
        })
    else:
        # The resource cannot be spatialized
        raise Exception(toolkit._("Only CSV and Shapefile data can be spatialized"))

    # '**' unpacks the kwargs dictionary which can contain an arbitrary number of arguments
    self.data = cls(**kwargs)

    # Spatialize
    if not self.data.publish():
        # Spatialization failed
        raise Exception(toolkit._("Spatialization failed."))
def harvest_source_update(context, data_dict):
    '''
    Authorization check for harvest source update.

    It forwards the checks to package_update, which will check for
    organization membership, whether the user is a sysadmin, etc., according
    to the instance configuration.
    '''
    model = context.get('model')
    user = context.get('user')
    source_id = data_dict['id']

    pkg = model.Package.get(source_id)
    if not pkg:
        raise pt.ObjectNotFound(pt._('Harvest source not found'))

    context['package'] = pkg

    try:
        pt.check_access('package_update', context, data_dict)
        return {'success': True}
    except pt.NotAuthorized:
        msg = pt._('User {0} not authorized to update harvest source {1}')\
            .format(user, source_id)
        return {
            'success': False,
            'msg': msg}
def __init__(self, package_id, resource_id, layer_name, username,
             geoserver=Geoserver.from_ckan_config(), lat_field=None,
             lng_field=None):
    self.geoserver = geoserver
    self.store = geoserver.default_datastore()
    self.name = layer_name
    self.username = username
    self.file_resource = toolkit.get_action("resource_show")(None, {"id": resource_id})
    self.package_id = package_id
    self.resource_id = resource_id

    # Spatialize it
    url = self.file_resource["url"]
    kwargs = {"resource_id": self.file_resource["id"]}
    if url.endswith('.zip'):
        cls = Shapefile
    elif url.endswith('.csv'):
        cls = Datastored
        kwargs.update({
            "lat_field": lat_field,
            "lng_field": lng_field
        })
    else:
        # The resource cannot be spatialized
        raise Exception(toolkit._("Only CSV and Shapefile data can be spatialized"))

    self.data = cls(**kwargs)

    # Spatialize
    if not self.data.publish():
        # Spatialization failed
        raise Exception(toolkit._("Spatialization failed."))
def edit(self, organization_name, inventory_entry_id, data=None, errors=None,
         error_summary=None):
    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author,
               'organization_name': c.organization_name,
               'save': 'save' in request.params,
               'schema': default_inventory_entry_schema_create()}

    if context['save'] and not data:
        return self._save_edit(inventory_entry_id, context)

    try:
        old_data = get_action('inventory_entry_show')(
            context, {'id': inventory_entry_id})
        data = data or old_data
    except NotFound:
        abort(404, _('Inventory Entry not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read inventory entry'))

    data = data or {}
    errors = errors or {}
    error_summary = error_summary or {}

    vars = {'data': data, 'errors': errors, 'error_summary': error_summary,
            'action': 'edit'}
    c.form = render('inventory/entry/inventory_entry_form.html',
                    extra_vars=vars)
    return render('inventory/entry/edit.html')
def resource_schema_pkey_create(context, data_dict):
    '''Add a primary key to a resource's schema.

    :param resource_id: the ID of the resource
    :type resource_id: string

    :param pkey: the primary key, either the name of one of the fields or a
        list of field names from the resource's schema
    :type pkey: string or iterable of strings

    :returns: the primary key that was created
    :rtype: string or list of strings

    '''
    # Fail if the resource already has a primary key.
    resource_id = toolkit.get_or_bust(data_dict, 'resource_id')
    try:
        pkey = toolkit.get_action('resource_schema_pkey_show')(
            context, {'resource_id': resource_id})
    except exceptions.InvalidResourceIDException:
        raise toolkit.ValidationError(toolkit._("Invalid resource_id"))
    if pkey is not None:
        raise toolkit.ValidationError(toolkit._("The resource already has a "
                                                "primary key"))

    # Otherwise create is the same as update.
    return toolkit.get_action('resource_schema_pkey_update')(context,
                                                             data_dict)
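# Illustrative usage sketch (not from the original source): calling the action
# above through the action API; the resource id, field name and empty context
# are hypothetical.
result = toolkit.get_action('resource_schema_pkey_create')(
    {}, {'resource_id': 'example-resource-id', 'pkey': 'station_id'})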
def inventory_entry_bulk_create(context, data_dict):
    model = context['model']
    schema = context['schema']
    session = context['session']
    organization = model.Group.get(context['organization_name'])
    inventory_entry_dict = {'group_id': organization.id}

    if not data_dict['field-name-input-0']:
        raise ValidationError({'error': [_('Please add at least one inventory entry.')]})

    for inventory_entry_id in range(10):
        inventory_entry_name = data_dict['field-name-input-' + str(inventory_entry_id)]
        if not inventory_entry_name:
            break

        inventory_entry_dict['title'] = inventory_entry_name
        inventory_entry_dict['recurring_interval'] = data_dict[
            'field-recurring-input-' + str(inventory_entry_id)]
        inventory_entry_dict['is_recurring'] = (
            inventory_entry_dict['recurring_interval'] != '0')

        data, errors = navl_validate(inventory_entry_dict, schema, context)
        if errors:
            session.rollback()
            raise ValidationError({'error': [_('Please check entry number {0}.'.format(
                inventory_entry_id + 1))]})

        obj = table_dict_save(inventory_entry_dict, InventoryEntry, context)
        model.repo.commit()

    return table_dictize(obj, context)
def validate_datarequest(context, request_data):
    errors = {}

    # Check name
    if len(request_data['title']) > constants.NAME_MAX_LENGTH:
        errors['Title'] = [tk._('Title must be a maximum of %d characters long') % constants.NAME_MAX_LENGTH]

    if not request_data['title']:
        errors['Title'] = [tk._('Title cannot be empty')]

    # Title is only checked in the database when it's correct
    avoid_existing_title_check = context['avoid_existing_title_check'] \
        if 'avoid_existing_title_check' in context else False

    if 'Title' not in errors and not avoid_existing_title_check:
        if db.DataRequest.datarequest_exists(request_data['title']):
            errors['Title'] = ['That title is already in use']

    # Check description
    if len(request_data['description']) > constants.DESCRIPTION_MAX_LENGTH:
        errors['Description'] = [tk._('Description must be a maximum of %d characters long') % constants.DESCRIPTION_MAX_LENGTH]

    # Check organization
    if request_data['organization_id']:
        try:
            tk.get_validator('group_id_exists')(request_data['organization_id'], context)
        except Exception:
            errors['Organization'] = ['Organization is not valid']

    if len(errors) > 0:
        raise tk.ValidationError(errors)
def request_reset(self):
    context = {"model": model, "session": model.Session,
               "user": toolkit.c.user, "auth_user_obj": toolkit.c.userobj}
    data_dict = {"id": toolkit.request.params.get("user")}
    try:
        toolkit.check_access("request_reset", context)
    except toolkit.NotAuthorized:
        toolkit.abort(401, toolkit._("Unauthorized to request reset password."))

    if toolkit.request.method == "POST":
        id = toolkit.request.params.get("user")

        context = {"model": model, "user": toolkit.c.user}
        data_dict = {"id": id}
        user_obj = None
        try:
            toolkit.get_action("user_show")(context, data_dict)
            user_obj = context["user_obj"]
        except toolkit.ObjectNotFound:
            h.flash_error(toolkit._("No such user: %s") % id)

        if user_obj:
            try:
                mailer.send_reset_link(user_obj)
                h.flash_success(toolkit._("Please check your inbox for "
                                          "a reset code."))
                h.redirect_to("/")
            except mailer.MailerException as e:
                h.flash_error(toolkit._("Could not send reset link: %s") % unicode(e))
def organization_facets(self, facets_dict, organization_type, package_type):
    return {
        'license_id': toolkit._('License'),
        'res_format': toolkit._('Formats'),
        'extras_odm_language': toolkit._('Language'),
        'extras_odm_spatial_range': toolkit._('Country')
    }
def as_package_id(package_id_or_name, context):
    """given a package_id_or_name, return just the package id"""
    model = context["model"]
    package = model.Package.get(package_id_or_name)
    if not package:
        raise toolkit.Invalid("%s: %s" % (toolkit._("Not found"),
                                          toolkit._("Dataset")))
    else:
        return package.id
def validate_datarequest_closing(context, request_data):
    accepted_dataset_id = request_data.get('accepted_dataset_id', '')
    if accepted_dataset_id:
        try:
            tk.get_validator('package_name_exists')(accepted_dataset_id, context)
        except Exception:
            raise tk.ValidationError({tk._('Accepted Dataset'): [tk._('Dataset not found')]})
def group_facets(self, facets_dict, group_type, package_type):
    default_facet_titles = {
        'groups': tk._('Categories'),
        'tags': tk._('Tags'),
        'res_format': tk._('Formats')
        # 'license_id': tk._('License'),
    }
    return default_facet_titles
def dataset_facets(self, facets_dict, package_type):
    new_facets_dict = OrderedDict()
    new_facets_dict['openstatus'] = toolkit._('Offenheit')
    new_facets_dict['metadata_source_type'] = toolkit._('Source')
    # del facets_dict['tags']
    for key in facets_dict:
        new_facets_dict[key] = facets_dict[key]
    return new_facets_dict
def as_org_id(org_id_or_name, context):
    """given a org_id_or_name, return just the org id"""
    model = context["model"]
    org = model.Group.get(org_id_or_name)
    if not org:
        raise toolkit.Invalid("%s: %s" % (toolkit._("Not found"),
                                          toolkit._("Organization")))
    else:
        return org.id
def info(self):
    return {
        "name": "gdoc_view",
        "title": toolkit._('Google Doc Previewer'),
        "default_title": toolkit._('Preview'),
        "icon": "compass",
        "always_available": True,
        "iframed": False,
    }
def info(self):
    return {
        "name": "officedocs_view",
        "title": toolkit._('Office Previewer'),
        "default_title": toolkit._('Preview'),
        "icon": "compass",
        "always_available": True,
        "iframed": False,
    }
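# Illustrative sketch (not from the original source): info() dicts like the two
# above are normally returned by plugins implementing IResourceView; the class
# name, format list and template name here are hypothetical.
import ckan.plugins as p


class ExampleOfficeDocsViewPlugin(p.SingletonPlugin):
    p.implements(p.IResourceView, inherit=True)

    def info(self):
        return {"name": "officedocs_view",
                "title": toolkit._('Office Previewer'),
                "default_title": toolkit._('Preview'),
                "icon": "compass",
                "always_available": True,
                "iframed": False}

    def can_view(self, data_dict):
        # Hypothetical check: only offer the preview for common Office formats.
        fmt = data_dict['resource'].get('format', '').lower()
        return fmt in ('doc', 'docx', 'xls', 'xlsx', 'ppt', 'pptx')

    def view_template(self, context, data_dict):
        return 'officedocs_view.html'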
def as_package_id(package_id_or_name, context):
    '''given a package_id_or_name, return just the package id'''
    model = context['model']
    package = model.Package.get(package_id_or_name)
    if not package:
        raise toolkit.Invalid('%s: %s' % (toolkit._('Not found'),
                                          toolkit._('Dataset')))
    else:
        return package.id
def publish_ogc(context, data_dict):
    """
    Publishes the resource details as a Geoserver layer based on the input
    details. If the layer creation is successful, returns a success message;
    otherwise raises an Exception.
    """
    # Gather inputs
    resource_id = data_dict.get("resource_id", None)
    layer_name = data_dict.get("layer_name", resource_id)
    username = context.get("user", None)
    package_id = data_dict.get("package_id", None)
    lat_field = data_dict.get("col_latitude", None)
    lng_field = data_dict.get("col_longitude", None)
    datastore = data_dict.get("geoserver_datastore", None)
    layer_version = data_dict.get("layer_version", None)
    workspace_name = data_dict.get("workspace_name", None)
    api_call_type = context.get("api_call_type", "ui")

    # Check that you have everything you need
    if None in [resource_id, layer_name, username, package_id, layer_version, workspace_name]:
        raise Exception(toolkit._("Not enough information to publish resource"))

    # Publish a layer
    def pub():
        layer = Layer.publish(package_id, resource_id, workspace_name,
                              layer_name, layer_version, username, datastore,
                              lat_field=lat_field, lng_field=lng_field)
        return layer

    try:
        l = pub()
        if l is None:
            log.debug("Failed to generate a Geoserver layer.")
            if api_call_type == 'ui':
                h.flash_error(_("Failed to generate a Geoserver layer."))
            raise Exception(toolkit._("Layer generation failed"))
        else:
            # csv content should be spatialized or a shapefile uploaded,
            # Geoserver updated, resources appended.
            # l should be a Layer instance. Return whatever you wish to
            log.debug("This resource has successfully been published as an OGC service.")
            if api_call_type == 'ui':
                h.flash_success(_("This resource has successfully been published as an OGC service."))
            return {"success": True,
                    "message": _("This resource has successfully been published as an OGC service.")}
    except socket.error:
        log.debug("Error connecting to Geoserver.")
        if api_call_type == 'ui':
            h.flash_error(_("Error connecting to Geoserver."))
def shortcode_validate(key, data, errors, context):
    """
    Accept shortcodes in the following forms and convert to a json list
    for storage:

    1. a list of strings, eg. ["code-one", "code-two"]
    2. a single string value with semicolon-separated values
       "code-one;code-two"
    """
    # just in case there was an error before our validator,
    # bail out here because our errors won't be useful
    if errors[key]:
        return

    value = data[key]

    if value is missing:
        data[key] = json.dumps([])
        return

    if isinstance(value, basestring):
        try:
            if isinstance(json.loads(value), list):
                return value
        except ValueError:
            pass  # value wasn't in json format, keep processing
        except TypeError:
            data[key] = json.dumps([])
            return
        value = value.split(';')

    if not isinstance(value, list):
        errors[key].append(_('expecting list of strings'))
        return

    out = []
    for element in value:
        if not isinstance(element, basestring):
            errors[key].append(_('invalid type for shortcode: %r') % element)
            continue
        if isinstance(element, str):
            try:
                element = element.decode('utf-8')
            except UnicodeDecodeError:
                errors[key].append(_('invalid encoding for "%s" value') % element)
                continue
        out.append(element)

    # TODO: future: check values against valid choices for this field
    # using @scheming_validator decorator to get the form field name

    if not errors[key]:
        data[key] = json.dumps(out)
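# Illustrative usage sketch (not from the original source): the accepted input
# shapes for shortcode_validate; the field key and values are hypothetical.
key = ('shortcodes',)
for raw_value in (['code-one', 'code-two'],      # already a list of strings
                  'code-one;code-two'):          # semicolon-separated string
    data = {key: raw_value}
    errors = {key: []}
    shortcode_validate(key, data, errors, context={})
    # data[key] now holds a JSON list such as '["code-one", "code-two"]'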
def auth_ytp_tasks_add(context, data_dict):
    if not user_is_sysadmin(context):
        return {'success': False, 'msg': toolkit._('Only sysadmins can add organization source')}
    else:
        return {'success': True}
def state_validator(key, data, errors, context):
    possible_state = ['new', 'open', 'archive']
    if data[key] not in possible_state:
        message = _('The state parameter must be new, open or archive.')
        errors[key].append(message)
def copy(self, id):
    context = {
        'model': m,
        'session': m.Session,
        'user': p.toolkit.c.user or p.toolkit.c.author,
        'auth_user_obj': p.toolkit.c.userobj,
        'save': 'save' in t.request.params,
    }

    # check permissions
    try:
        t.check_access('package_create', context)
    except t.NotAuthorized:
        t.abort(401, t._('Unauthorized to copy this package'))

    data_dict = {'id': id}
    data = t.get_action('package_show')(None, data_dict)

    # change dataset title and name
    data['name'] = '{}-copy'.format(data['name'])
    while True:
        try:
            _pkg = t.get_action('package_show')(None, {
                'name_or_id': data['name']
            })
        except l.NotFound:
            break
        else:
            import random
            data['name'] = '{}-copy-{}'.format(data['name'],
                                               random.randint(1, 100))
    data['title'] = 'Copy of {0}'.format(data['title'])

    # remove unnecessary attributes from the dataset
    remove_attrs = [
        'id', 'revision_id', 'metadata_created', 'metadata_modified',
        'resources', 'revision_timestamp'
    ]
    for attr in remove_attrs:
        if attr in data:
            del data[attr]

    if data and 'type' in data:
        package_type = data['type']
    else:
        package_type = self._guess_package_type(True)

    data = data or clean_dict(
        dict_fns.unflatten(
            tuplize_dict(
                parse_params(t.request.params, ignore_keys=CACHE_PARAMETERS))))

    c.resources_json = h.json.dumps(data.get('resources', []))

    # convert tags if not supplied in data
    if data and not data.get('tag_string'):
        data['tag_string'] = ', '.join(
            h.dict_list_reduce(data.get('tags', {}), 'name'))

    # if we are creating from a group then this allows the group to be
    # set automatically
    data['group_id'] = t.request.params.get('group') or \
        t.request.params.get('groups__0__id')

    # in the phased add dataset we need to know that
    # we have already completed stage 1
    stage = ['active']
    if data.get('state', '').startswith('draft'):
        stage = ['active', 'complete']

    form_snippet = self._package_form(package_type=package_type)
    form_vars = {
        'data': data,
        'errors': {},
        'error_summary': {},
        'action': 'new',
        'stage': stage,
        'dataset_type': package_type,
    }
    c.errors_json = h.json.dumps({})

    # override form action to use built-in package controller
    c.form_action = t.url_for(controller='package', action='new')

    self._setup_template_variables(context, {}, package_type=package_type)

    new_template = self._new_template(package_type)
    extra_vars = {
        'form_vars': form_vars,
        'form_snippet': form_snippet,
        'dataset_type': package_type
    }
    return t.render(new_template, extra_vars=extra_vars)
def upsert_data(context, data_dict):
    '''insert all data from records'''
    if not data_dict.get('records'):
        return

    method = data_dict.get('method', _UPSERT)

    fields = _get_fields(context, data_dict)
    field_names = _pluck('id', fields)
    records = data_dict['records']
    sql_columns = ", ".join(
        ['"%s"' % name.replace('%', '%%') for name in field_names]
        + ['"_full_text"'])

    if method == _INSERT:
        rows = []
        for num, record in enumerate(records):
            _validate_record(record, num, field_names)

            row = []
            for field in fields:
                value = record.get(field['id'])
                if value and field['type'].lower() == 'nested':
                    # a tuple with an empty second value
                    value = (json.dumps(value), '')
                row.append(value)
            row.append(_to_full_text(fields, record))
            rows.append(row)

        sql_string = u'''INSERT INTO "{res_id}" ({columns})
            VALUES ({values}, to_tsvector(%s));'''.format(
            res_id=data_dict['resource_id'],
            columns=sql_columns,
            values=', '.join(['%s' for field in field_names]))

        try:
            context['connection'].execute(sql_string, rows)
        except sqlalchemy.exc.DataError as err:
            raise InvalidDataError(
                toolkit._(
                    "The data was invalid (for example: a numeric value "
                    "is out of range or was inserted into a text field)."))

    elif method in [_UPDATE, _UPSERT]:
        unique_keys = _get_unique_key(context, data_dict)
        if len(unique_keys) < 1:
            raise ValidationError(
                {'table': [u'table does not have a unique key defined']})

        for num, record in enumerate(records):
            # all key columns have to be defined
            missing_fields = [
                field for field in unique_keys
                if field not in record
            ]
            if missing_fields:
                raise ValidationError({
                    'key': [
                        u'fields "{fields}" are missing but needed as key'.format(
                            fields=', '.join(missing_fields))
                    ]
                })

            for field in fields:
                value = record.get(field['id'])
                if value is not None and field['type'].lower() == 'nested':
                    # a tuple with an empty second value
                    record[field['id']] = (json.dumps(value), '')

            non_existing_field_names = [
                field for field in record
                if field not in field_names
            ]
            if non_existing_field_names:
                raise ValidationError({
                    'fields': [
                        u'fields "{0}" do not exist'.format(
                            ', '.join(non_existing_field_names))
                    ]
                })

            unique_values = [record[key] for key in unique_keys]

            used_fields = [field for field in fields
                           if field['id'] in record]
            used_field_names = _pluck('id', used_fields)
            used_values = [record[field] for field in used_field_names]

            full_text = _to_full_text(fields, record)

            if method == _UPDATE:
                sql_string = u'''
                    UPDATE "{res_id}"
                    SET ({columns}, "_full_text") = ({values}, to_tsvector(%s))
                    WHERE ({primary_key}) = ({primary_value});
                '''.format(
                    res_id=data_dict['resource_id'],
                    columns=u', '.join(
                        [u'"{0}"'.format(field) for field in used_field_names]),
                    values=u', '.join(['%s' for _ in used_field_names]),
                    primary_key=u','.join(
                        [u'"{0}"'.format(part) for part in unique_keys]),
                    primary_value=u','.join(["%s"] * len(unique_keys)))

                results = context['connection'].execute(
                    sql_string, used_values + [full_text] + unique_values)

                # validate that exactly one row has been updated
                if results.rowcount != 1:
                    raise ValidationError({
                        'key': [u'key "{0}" not found'.format(unique_values)]
                    })

            elif method == _UPSERT:
                sql_string = u'''
                    UPDATE "{res_id}"
                    SET ({columns}, "_full_text") = ({values}, to_tsvector(%s))
                    WHERE ({primary_key}) = ({primary_value});
                    INSERT INTO "{res_id}" ({columns}, "_full_text")
                        SELECT {values}, to_tsvector(%s)
                        WHERE NOT EXISTS (SELECT 1 FROM "{res_id}"
                            WHERE ({primary_key}) = ({primary_value}));
                '''.format(
                    res_id=data_dict['resource_id'],
                    columns=u', '.join(
                        [u'"{0}"'.format(field) for field in used_field_names]),
                    values=u', '.join(
                        ['%s::nested' if field['type'] == 'nested' else '%s'
                         for field in used_fields]),
                    primary_key=u','.join(
                        [u'"{0}"'.format(part) for part in unique_keys]),
                    primary_value=u','.join(["%s"] * len(unique_keys)))

                context['connection'].execute(
                    sql_string,
                    (used_values + [full_text] + unique_values) * 2)
def custom_boolean_validator(key, data, errors, context):
    value = data.get(key)
    if not isinstance(value, bool):
        errors[key].append(toolkit._('Incorrect type, should be boolean'))
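# Illustrative usage sketch (not from the original source): calling the
# validator directly with CKAN's flattened key/data/errors structures; the
# field name and value are hypothetical.
key = ('is_public',)
data = {key: 'yes'}   # a string, not a bool
errors = {key: []}
custom_boolean_validator(key, data, errors, context={})
assert errors[key]    # the non-boolean value was rejected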
def request_counter_validator(key, data, errors, context):
    counters = ['request', 'replied', 'declined', 'shared']
    if data[key] not in counters:
        message = _(
            'The flag parameter must be request, replied, declined, or shared')
        errors[key].append(message)
def required_if_public(key, data, errors, context):
    value = data[key]
    private = data[('private',)]
    if private == 'False' and not value:
        errors[key].append(_('This field is required'))
def publishOGC(self):
    """
    Publishes the resource content into Geoserver.
    """
    if request.method != 'POST' or not request.is_xhr:
        return {
            'success': False,
            'message': toolkit._("Bad request - JSON Error: No request body data")
        }

    context = {'model': model, 'session': model.Session,
               'user': c.user or c.author, 'auth_user_obj': c.userobj}
    data = clean_dict(unflatten(tuplize_dict(parse_params(request.params))))

    result = {'success': False,
              'message': toolkit._("Not enough information to publish this resource.")}

    resource_id = data.get("resource_id", None)
    username = context.get("user", None)
    package_id = data.get("package_id", None)
    lat_field = data.get("geoserver_lat_field", None)
    lng_field = data.get("geoserver_lng_field", None)
    state = data.get("geoserver_state_field", None)

    # get layer from package
    try:
        md_package = None
        pkg = toolkit.get_action('package_show')(context, {'id': package_id})
        extras = pkg.get('extras', [])

        for extra in extras:
            key = extra.get('key', None)
            if key == 'md_package':
                md_package = json.loads(extra.get('value'))
                break

        resourceDescription = md_package.get('resourceDescription', {})
        layer = resourceDescription.get('usginContentModelLayer', resource_id)
        version = resourceDescription.get('usginContentModelVersion', None)

        # handle harvested datasets that do not have a md_package
        if layer == resource_id and version is None:
            usgin_tag = []
            for tag in pkg['tags']:
                if tag['name'].startswith('usgincm:'):
                    usgin_tag.append(tag['name'])
            for key, value in (get_meta_action.get_usgin_prefix()).iteritems():
                if reduce(lambda v1, v2: v1 or v2,
                          map(lambda v: v in usgin_tag, value)):
                    key_arr = key.split("+")
                    break
            layer = key_arr[1]
            version = key_arr[2]
    except:
        return result

    layer_name = data.get("layer_name", layer)
    workspace_name = state + '' + layer_name

    if None in [resource_id, layer_name, username, package_id, version, state]:
        return result

    try:
        result = toolkit.get_action('geoserver_publish_ogc')(context, {
            'package_id': package_id,
            'resource_id': resource_id,
            'workspace_name': workspace_name,
            'layer_name': layer_name,
            'username': username,
            'col_latitude': lat_field,
            'col_longitude': lng_field,
            'layer_version': version})
    except:
        return {
            'success': False,
            'message': toolkit._("An error occurred while processing your request, please contact your administrator.")
        }

    return result
class DataPackagerPackageController(toolkit.BaseController):

    def new_metadata(self, id, data=None, errors=None, error_summary=None):
        import ckan.lib.base as base

        # Change the package state from draft to active and save it.
        context = {'model': model, 'session': model.Session,
                   'user': toolkit.c.user or toolkit.c.author,
                   'auth_user_obj': toolkit.c.userobj}
        data_dict = toolkit.get_action('package_show')(context, {'id': id})
        data_dict['id'] = id
        data_dict['state'] = 'active'
        toolkit.get_action('package_update')(context, data_dict)

        base.redirect(
            helpers.url_for(controller='package', action='read', id=id))

    def download_tabular_data_format(self, package_id):
        '''Return the given package as a Tabular Data Format ZIP file.

        '''
        context = {
            'model': model,
            'session': model.Session,
            'user': toolkit.c.user or toolkit.c.author,
        }
        r = toolkit.response
        r.content_disposition = 'attachment; filename={0}.zip'.format(
            package_id)
        r.content_type = 'application/octet-stream'

        # Make a zipstream and put it in the context. This means the
        # package_to_tabular_data_format action will add files into
        # the zipstream for us.
        pkg_zipstream = zipstream.ZipFile(mode='w',
                                          compression=zipstream.ZIP_DEFLATED)
        context['pkg_zipstream'] = pkg_zipstream

        toolkit.get_action('package_to_tabular_data_format')(context, {
            'id': package_id})

        return pkg_zipstream

    def new_resource(self, package_id, data):
        import ckan.controllers.package
        return ckan.controllers.package.PackageController().new_resource(
            package_id, data)

    def _render_resource_edit_page_first_time(self, package_id, resource_id,
                                              data=None, errors=None,
                                              error_summary=None):
        '''Render the resource edit page for the first time.

        This happens when a user gets to the resource edit page for the first
        time (e.g. by clicking a link on another page) as opposed to already
        being on the resource edit page and clicking something on the page
        that causes it to reload. The data for the resource edit form needs
        to be retrieved from the database before rendering it.

        '''
        # Get the package dict and resource dict.
        context = {
            'api_version': 3,
            'for_edit': True,
            'user': toolkit.c.user or toolkit.c.author,
            'auth_user_obj': toolkit.c.userobj
        }
        pkg_dict = toolkit.get_action('package_show')(context, {
            'id': package_id})
        if pkg_dict['state'].startswith('draft'):
            resource_dict = toolkit.get_action('resource_show')(
                context, {'id': resource_id})
            fields = ['url', 'resource_type', 'format', 'name', 'description',
                      'id']
            data = {}
            for field in fields:
                data[field] = resource_dict[field]
            return self.new_resource(package_id, data=data)
        else:
            try:
                resource_dict = toolkit.get_action('resource_show')(
                    context, {'id': resource_id})
            except toolkit.ObjectNotFound:
                toolkit.abort(404, toolkit._('Resource not found'))

        # Setup template context variables.
        toolkit.c.pkg_dict = pkg_dict
        toolkit.c.resource = resource_dict
        toolkit.c.form_action = helpers.url_for(controller='package',
                                                action='resource_edit',
                                                resource_id=resource_id,
                                                id=package_id)

        # Get the resource schema fields that will be passed to the template
        # for rendering into the form.
        schema_fields = toolkit.get_action('resource_schema_show')(
            data_dict={'resource_id': resource_id})['fields']

        # We add one __new_attr entry into each field, this is used when the
        # user wants to add a new attribute to a field.
        for field in schema_fields:
            assert '__new_attr' not in field, ("__new_attr keys are never "
                                               "saved in the db")
            field['__new_attr'] = {'key': '', 'value': ''}

        # Setup template extra_vars.
        schema_errors = [{} for field in schema_fields]
        extra_vars = {
            'action': 'new',
            'selected_column': 0,
            'data': data or resource_dict,
            'errors': errors or {},
            'error_summary': error_summary or {},
            'schema_fields': schema_fields,
            'schema_errors': schema_errors,
        }

        return toolkit.render('package/resource_edit.html',
                              extra_vars=extra_vars)

    def _re_render_resource_edit_page(self, package_id, resource_id,
                                      data=None, errors=None,
                                      error_summary=None, schema_errors=None):
        '''Re-render the resource edit page, sending back the form data that
        the user submitted.

        This happens when the user clicks a button on the resource edit page
        that causes the page to be reloaded (e.g. clicking on one of the
        columns in the CSV preview). The page is re-rendered with any form
        values that the user had entered intact.

        '''
        # Get the form data that the user submitted.
        data = data or _get_data()
        schema_fields = _extract_fields_from_data(data)

        # FIXME: This actually validates the data in all of the resource
        # schema fields, when we only really need to validate the field for
        # the currently selected column.
        schema_errors = schema_errors or _call_actions(
            schema_fields, resource_id, validate_only=True)

        # Setup template context variables.
        toolkit.c.pkg_dict = toolkit.get_action('package_show')(
            {'for_edit': True}, {'id': package_id})
        toolkit.c.resource = toolkit.get_action('resource_show')(
            {'for_edit': True}, {'id': resource_id})
        toolkit.c.form_action = helpers.url_for(controller='package',
                                                action='resource_edit',
                                                resource_id=resource_id,
                                                id=package_id)

        # We show the first column whose schema fields have any validation
        # errors, or if no columns have validation errors then we show the
        # column that the user clicked on.
        selected_column = _first_column_with_errors(schema_errors)
        if selected_column is None:
            selected_column = _selected_column()

        # Setup template extra_vars.
        extra_vars = {
            'schema_fields': schema_fields,
            'schema_errors': schema_errors,
            'selected_column': selected_column,
            'data': _delete_form_logic_keys(data) or toolkit.c.resource,
            'errors': errors or {},
            'error_summary': error_summary or {},
            'action': 'new',
        }

        return toolkit.render('package/resource_edit.html',
                              extra_vars=extra_vars)

    def _update_resource(self, package_id, resource_id, data):
        '''Update a resource and redirect to the resource_read page.

        This happens when the user clicks the Save button on the
        resource_edit page. The form values they submit are saved to the
        database and they're sent to the resource_read page.

        If there are any validation errors when trying to save the submitted
        values, then the edit form is re-rendered with the error messages
        instead of redirecting to the read page.

        '''
        # Get the form data that the user submitted.
        data = data or _get_data()
        new_fields = _extract_fields_from_data(data)
        data = _delete_form_logic_keys(data)

        # Try to create, update and delete all the resource schema fields in
        # db as necessary.
        schema_errors = _call_actions(new_fields, resource_id)

        # If there were errors, re-render the resource edit form with the
        # error messages.
        for error_dict in schema_errors:
            if error_dict:
                # Setup the template context and extra_vars that the form
                # needs and render the form.
                toolkit.c.pkg_dict = toolkit.get_action('package_show')(
                    {'for_edit': True}, {'id': package_id})
                toolkit.c.resource = toolkit.get_action('resource_show')(
                    {'for_edit': True}, {'id': resource_id})
                toolkit.c.form_action = helpers.url_for(
                    controller='package', action='resource_edit',
                    resource_id=resource_id, id=package_id)
                extra_vars = {
                    'schema_fields': new_fields,
                    'schema_errors': schema_errors,
                    'selected_column': _first_column_with_errors(
                        schema_errors),
                    'data': data or toolkit.c.resource,
                    'errors': {},
                    'error_summary': {},
                    'action': 'new',
                }
                return toolkit.render('package/resource_edit.html',
                                      extra_vars=extra_vars)

        # Add the current resource schema into the data dict, so that
        # resource_update doesn't delete it.
        current_resource = toolkit.get_action('resource_show')(
            data_dict={'id': data['id']})
        data['schema'] = current_resource['schema']

        # Update the resource itself (e.g. if the user has changed the
        # resource's name or file).
        data['package_id'] = package_id
        data['id'] = resource_id
        context = {
            'model': model,
            'session': model.Session,
            'api_version': 3,
            'for_edit': True,
            'user': pylons.c.user or pylons.c.author,
            'auth_user_obj': pylons.c.userobj
        }
        try:
            toolkit.get_action('resource_update')(context, data)
        except toolkit.ValidationError as e:
            errors = e.error_dict
            error_summary = e.error_summary
            return self.resource_edit(package_id, resource_id, data, errors,
                                      error_summary)
        except toolkit.NotAuthorized:
            toolkit.abort(401,
                          toolkit._('Unauthorized to edit this resource'))
def is_valid_status(value, context):
    if value in issuemodel.ISSUE_STATUS:
        return value
    else:
        raise toolkit.Invalid(
            toolkit._('{0} is not a valid status').format(value))
def unauthorized():
    toolkit.c.code = 401
    toolkit.c.content = toolkit._('You are not authorized to do this')
    return toolkit.render('error_document_template.html')
"""Return a list of groups""" return tk.get_action('group_list')({}, {'all_fields': True}) # def package_showcase_list(context): # return tk.get_action('ckanext_package_showcase_list')({}, {'package_id': context.pkg_dict['id']}) def ckan_site_url(): return config.get('ckan.site_url', '').rstrip('/') # monkeypatch activity streams to rename 'group' to 'topic' activity_streams['changed group'] = ( lambda c, a: tk._("{actor} updated the topic {group}")) activity_streams['deleted group'] = ( lambda c, a: tk._("{actor} deleted the topic {group}")) activity_streams['new group'] = ( lambda c, a: tk._("{actor} created the topic {group}")) # Add back activity types removed in the 'related'->'showcase' upgrade. # They'll be generic, but at least they won't crash. activity_streams['changed related item'] = ( lambda c, a: tk._("{actor} updated a related item.")) activity_streams['deleted related item'] = ( lambda c, a: tk._("{actor} deleted a related item.")) activity_streams['new related item'] = ( lambda c, a: tk._("{actor} created a related item."))
def user_update(context, data_dict):
    msg = toolkit._("Users cannot be edited.")
    return _no_permissions(context, msg)
def resource_table_status_update(context, data_dict):
    msg = toolkit._('Cannot update resource table status.')
    return {'success': False, 'msg': msg}
def issue_exists(issue_id, context):
    issue_id = is_positive_integer(issue_id, context)
    result = issuemodel.Issue.get(issue_id, session=context['session'])
    if not result:
        raise toolkit.Invalid(toolkit._('Issue not found') + ': %s' % issue_id)
    return issue_id
def validate_datarequest(context, request_data):
    errors = {}

    # Check name
    if len(request_data["title"]) > constants.NAME_MAX_LENGTH:
        errors[tk._("Title")] = [
            tk._("Title must be a maximum of {max_len} characters long").
            format(max_len=constants.NAME_MAX_LENGTH)
        ]

    if not request_data["title"]:
        errors[tk._("Title")] = [tk._("Title cannot be empty")]

    # Title is only checked in the database when it's correct
    avoid_existing_title_check = (context["avoid_existing_title_check"]
                                  if "avoid_existing_title_check" in context
                                  else False)

    if "Title" not in errors and not avoid_existing_title_check:
        if db.DataRequest.datarequest_exists(request_data["title"]):
            errors[tk._("Title")] = [tk._("That title is already in use")]

    # Check description
    if (datarequests.get_config_bool_value(
            "ckan.datarequests.description_required", False)
            and not request_data["description"]):
        errors[tk._("Description")] = [tk._("Description cannot be empty")]

    if len(request_data["description"]) > constants.DESCRIPTION_MAX_LENGTH:
        errors[tk._("Description")] = [
            tk._("Description must be a maximum of {max_len} characters long").
            format(max_len=constants.DESCRIPTION_MAX_LENGTH)
        ]

    # Check organization
    if request_data["organization_id"]:
        try:
            tk.get_validator("group_id_exists")(
                request_data["organization_id"], context)
        except Exception:
            errors[tk._("Organization")] = [tk._("Organization is not valid")]

    if len(errors) > 0:
        raise tk.ValidationError(errors)
def workflow_state_display_name(workflow_state_id):
    if not workflow_state_id:
        return tk._('(None)')
    workflow_state = model_ext.WorkflowState.get(workflow_state_id)
    workflow_state_dict = workflow_state_dictize(workflow_state, context)
    return workflow_state_dict['display_name']
def cas_unauthorized(self):
    # This is our "you are not authorized" page
    c = toolkit.c
    c.code = 401
    c.content = toolkit._('You are not authorized to do this')
    return toolkit.render('error_document_template.html')
def send_error_mail(context, source_id, status):
    last_job = status['last_job']
    source = get_action('harvest_source_show')(context, {'id': source_id})

    ckan_site_url = config.get('ckan.site_url')
    job_url = toolkit.url_for('harvest_job_show', source=source['id'],
                              id=last_job['id'])

    msg = toolkit._('This is a failure-notification of the latest harvest job ({0}) set-up in {1}.').format(job_url, ckan_site_url)
    msg += '\n\n'

    msg += toolkit._('Harvest Source: {0}').format(source['title']) + '\n'
    if source.get('config'):
        msg += toolkit._('Harvester-Configuration: {0}').format(source['config']) + '\n'
    msg += '\n\n'

    if source['organization']:
        msg += toolkit._('Organization: {0}').format(source['organization']['name'])
        msg += '\n\n'

    msg += toolkit._('Harvest Job Id: {0}').format(last_job['id']) + '\n'
    msg += toolkit._('Created: {0}').format(last_job['created']) + '\n'
    msg += toolkit._('Finished: {0}').format(last_job['finished']) + '\n\n'

    report = get_action('harvest_job_report')(
        context, {'id': status['last_job']['id']})

    msg += toolkit._('Records in Error: {0}').format(
        str(last_job['stats'].get('errored', 0)))
    msg += '\n'

    obj_error = ''
    job_error = ''

    for harvest_object_error in islice(report.get('object_errors'), 0, 20):
        obj_error += harvest_object_error['message'] + '\n'

    for harvest_gather_error in islice(report.get('gather_errors'), 0, 20):
        job_error += harvest_gather_error['message'] + '\n'

    if (obj_error != '' or job_error != ''):
        msg += toolkit._('Error Summary')
        msg += '\n'

    if (obj_error != ''):
        msg += toolkit._('Document Error')
        msg += '\n' + obj_error + '\n\n'

    if (job_error != ''):
        msg += toolkit._('Job Errors')
        msg += '\n' + job_error + '\n\n'

    if obj_error or job_error:
        msg += '\n--\n'
        msg += toolkit._('You are receiving this email because you are currently set-up as Administrator for {0}. Please do not reply to this email as it was sent from a non-monitored address.').format(config.get('ckan.site_title'))

    recipients = []

    # gather sysadmins
    model = context['model']
    sysadmins = model.Session.query(model.User).filter(
        model.User.sysadmin == True).all()

    for sysadmin in sysadmins:
        recipients.append({
            'name': sysadmin.name,
            'email': sysadmin.email})

    # gather organization-admins
    if source.get('organization'):
        members = get_action('member_list')(context, {
            'id': source['organization']['id'],
            'object_type': 'user',
            'capacity': 'admin'})

        for member in members:
            member_details = get_action('user_show')(
                context, {'id': member[0]})

            if member_details['email']:
                recipients.append({
                    'name': member_details['name'],
                    'email': member_details['email']})

    for recipient in recipients:
        email = {'recipient_name': recipient['name'],
                 'recipient_email': recipient['email'],
                 'subject': config.get('ckan.site_title') + ' - Harvesting Job - Error Notification',
                 'body': msg}

        try:
            mailer.mail_recipient(**email)
        except mailer.MailerException:
            log.error('Sending Harvest-Notification-Mail failed. Message: ' + msg)
        except Exception as e:
            log.error(e)
            raise
def _home_handle_error(package_id, exc):
    msg = toolkit._("Validation error: {0}").format(exc.error_summary)
    h.flash(msg, category='alert-error')
    return p.toolkit.redirect_to('issues_home', package_id=package_id)
def organization_facets(self, facets_dict, organization_type, package_type):
    facets_dict['vocab_category'] = toolkit._('Category')
    facets_dict['security_classification'] = toolkit._('Security Classification')
    facets_dict['contains_personal_information'] = toolkit._('Contains Personal Data')
    return facets_dict
def copy_resources(self, id, data=None, errors=None, error_summary=None):
    context = {
        'model': m,
        'session': m.Session,
        'user': p.toolkit.c.user or p.toolkit.c.author,
        'auth_user_obj': p.toolkit.c.userobj,
        'save': 'save' in t.request.params,
    }

    # check permissions
    try:
        t.check_access('package_create', context)
    except t.NotAuthorized:
        t.abort(401, t._('Unauthorized to copy this package'))

    # get package type
    if data and 'type' in data:
        package_type = data['type']
    else:
        package_type = self._guess_package_type(True)

    resources = None
    if data is None:
        data = t.get_action('package_show')(None, {'id': id})

        # generate new unused package name
        data['title'] = 'Copy of {0}'.format(data['title'])
        data['name'] = '{}-copy'.format(data['name'])
        while True:
            try:
                _ = t.get_action('package_show')(None, {
                    'name_or_id': data['name']
                })
            except l.NotFound:
                break
            else:
                import random
                data['name'] = '{}-copy-{}'.format(data['name'],
                                                   random.randint(1, 100))

        # remove unnecessary attributes from the dataset
        remove_attrs = [
            'id', 'revision_id', 'metadata_created', 'metadata_modified',
            'revision_timestamp'
        ]
        for attr in remove_attrs:
            if attr in data:
                del data[attr]

        # process package resources
        resources = data.pop('resources', [])
        remove_attrs = ('id', 'revision_id', 'created', 'last_modified',
                        'package_id')
        for resource in resources:
            for attr in remove_attrs:
                if attr in resource:
                    del resource[attr]

    c.resources_json = h.json.dumps(resources)

    form_snippet = 'package/copy_package_form.html'
    c.form_action = t.url_for(
        controller='ckanext.sokigo.controller:CopyController',
        action='copy_resources', id=id)

    if context['save'] and t.request.method == 'POST':
        data = clean_dict(
            dict_fns.unflatten(
                tuplize_dict(
                    parse_params(t.request.POST,
                                 ignore_keys=CACHE_PARAMETERS))))
        data['resources'] = resources

        # convert tags if not supplied in data
        if data and not data.get('tag_string'):
            data['tag_string'] = ', '.join(
                h.dict_list_reduce(data.get('tags', {}), 'name'))

        # if we are creating from a group then this allows the group to be
        # set automatically
        data['group_id'] = t.request.params.get('group') or \
            t.request.params.get('groups__0__id')

        try:
            pkg_dict = t.get_action('package_create')(context, data)
        except l.NotAuthorized:
            t.abort(403, _('Unauthorized to read package %s') % '')
        except l.NotFound as e:
            t.abort(404, _('Dataset not found'))
        except dict_fns.DataError:
            t.abort(400, _(u'Integrity Error'))
        except SearchIndexError as e:
            try:
                exc_str = text_type(repr(e.args))
            except Exception:  # We don't like bare excepts
                exc_str = text_type(str(e))
            t.abort(500,
                    _(u'Unable to add package to search index.') + exc_str)
        except t.ValidationError as e:
            data['state'] = 'none'
            c.data = data
            c.errors_json = h.json.dumps(e.error_dict)

            form_vars = {
                'data': data,
                'errors': e.error_dict,
                'error_summary': e.error_summary,
                'action': 'new',
                'stage': data['state'],
                'dataset_type': package_type
            }
            extra_vars = {
                'form_vars': form_vars,
                'form_snippet': form_snippet,
                'dataset_type': package_type
            }
            return t.render('package/copy.html', extra_vars=extra_vars)
        else:
            h.redirect_to(controller='package', action='read',
                          id=pkg_dict['name'])

    c.data = data
    c.errors_json = h.json.dumps(errors)

    form_vars = {
        'data': data,
        'errors': errors or {},
        'error_summary': error_summary or {},
        'action': 'new',
        'stage': data['state'],
        'dataset_type': package_type
    }
    extra_vars = {
        'form_vars': form_vars,
        'form_snippet': form_snippet,
        'dataset_type': package_type
    }
    return t.render('package/copy.html', extra_vars=extra_vars)
def validate_datarequest(context, request_data):
    errors = {}

    # Check name
    if len(request_data['title']) > constants.NAME_MAX_LENGTH:
        errors[tk._('Title')] = [tk._('Title must be a maximum of %d characters long') % constants.NAME_MAX_LENGTH]

    if not request_data['title']:
        errors[tk._('Title')] = [tk._('Title cannot be empty')]

    # Title is only checked in the database when it's correct
    avoid_existing_title_check = context['avoid_existing_title_check'] \
        if 'avoid_existing_title_check' in context else False

    if 'Title' not in errors and not avoid_existing_title_check:
        if db.DataRequest.datarequest_exists(request_data['title']):
            errors[tk._('Title')] = [tk._('That title is already in use')]

    # Check description
    if common.get_config_bool_value('ckan.datarequests.description_required', False) \
            and not request_data['description']:
        errors[tk._('Description')] = [tk._('Description cannot be empty')]

    if len(request_data['description']) > constants.DESCRIPTION_MAX_LENGTH:
        errors[tk._('Description')] = [tk._('Description must be a maximum of %d characters long') % constants.DESCRIPTION_MAX_LENGTH]

    # Check organization
    if request_data['organization_id']:
        try:
            tk.get_validator('group_id_exists')(request_data['organization_id'], context)
        except Exception:
            errors[tk._('Organization')] = [tk._('Organization is not valid')]

    if len(errors) > 0:
        raise tk.ValidationError(errors)
def user_create(context, data_dict):
    msg = toolkit._('Users cannot be created.')
    return _no_permissions(context, msg)
def group_facets(self, facets_dict, group_type, package_type):
    if 'groups' in facets_dict:
        facets_dict['groups'] = tk._('Topics')
    return facets_dict
def request_reset(context, data_dict):
    msg = toolkit._('Users cannot reset passwords.')
    return _no_permissions(context, msg)
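# Illustrative sketch (not from the original source): the auth overrides above
# (user_create, user_update, request_reset, ...) all delegate to a
# _no_permissions helper; a minimal version consistent with how they are used
# might look like this.
def _no_permissions(context, msg):
    user = context.get('user')
    return {'success': False, 'msg': msg}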
def boolean_validator(key, data, errors, context):
    try:
        data[key] = asbool(data[key])
    except ValueError:
        # asbool() raises ValueError for anything it cannot interpret as a
        # boolean, so report that as a validation error.
        message = _('The {0} parameter must be a Boolean value.').format(key[0])
        errors[key].append(message)
def organization_facets(self, facets_dict, organization_type, package_type):
    if 'groups' in facets_dict:
        facets_dict['groups'] = tk._('Topics')
    return facets_dict
def dataset_facets(self, facets_dict, package_type):
    return OrderedDict([('vocab_custom_topics', 'Topics'),
                        ('vocab_custom_subtopics', 'Subtopics'),
                        ('tags', toolkit._('Tags')),
                        ('res_format', toolkit._('Formats')),
                        ('organization', toolkit._('Organizations'))])
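# Illustrative sketch (not from the original source): facet dictionaries like
# the ones above are returned from plugins implementing IFacets; the plugin
# class name here is hypothetical.
from collections import OrderedDict

import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit


class ExampleFacetsPlugin(plugins.SingletonPlugin):
    plugins.implements(plugins.IFacets, inherit=True)

    def dataset_facets(self, facets_dict, package_type):
        # Reuse the custom facet ordering shown in the snippet above.
        return OrderedDict([('vocab_custom_topics', 'Topics'),
                            ('vocab_custom_subtopics', 'Subtopics'),
                            ('tags', toolkit._('Tags')),
                            ('res_format', toolkit._('Formats')),
                            ('organization', toolkit._('Organizations'))])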