Example #1
def get_cube(context, data_dict):
    """
    Return a dict representation of a cube, given a cubeId, if it exists.

    :param cubeId: ID of the cube to retrieve (e.g. 1310001)
    :type cubeId: str

    :return: requested cube
    :rtype: dict

    :raises: ValidationError, ObjectNotFound
    """
    cube_id = _get_or_bust(data_dict, 'cubeId')
    lc = ckanapi.LocalCKAN(context=context)
    result = lc.action.package_search(
        q=('type:cube AND '
           'product_id_new:{cube_id}').format(cube_id=cube_id),
        rows=1)

    if not result['count']:
        raise ObjectNotFound('Cube not found')
    elif result['count'] > 1:
        raise ValidationError('More than one cube with given cubeid found')
    else:
        return result['results'][-1]
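If the plugin exposes this function through its get_actions() hook, a caller can reach it via CKAN's action machinery. A minimal sketch, assuming the action is registered under the name 'get_cube' (the registration name and the empty calling context are assumptions):

from ckan.plugins import toolkit

def lookup_cube_title(cube_id):
    # Hypothetical helper: fetch the cube and return its title, or None if missing.
    try:
        cube = toolkit.get_action('get_cube')({}, {'cubeId': cube_id})
    except toolkit.ObjectNotFound:
        return None
    return cube.get('title')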
Example #2
def featured_upsert(context, data_dict):
    data, errors = df.validate(data_dict, schema, context)

    if errors:
        raise ValidationError(errors)

    featured = db.Featured.get(resource_view_id=data['resource_view_id'])
    if featured is None:
        featured = db.Featured()

    featured.resource_view_id = data['resource_view_id']

    if 'canonical' in data:
        featured.canonical = data['canonical']

    if 'homepage' in data:
        featured.homepage = data['homepage']

    resource_id = model.ResourceView.get(featured.resource_view_id).resource_id
    featured.package_id = model.Resource.get(resource_id).package_id

    featured.save()

    session = context['session']
    session.add(featured)
    session.commit()

    return table_dictize(featured, context)
Example #3
def validate_solr_args(args):
    u'''Validates the arguments for usage in Solr.
    '''
    msg = {}
    for argn, _ in args.items():
        if argn not in VALID_SOLR_ARGS:
            msg[argn] = translate('Invalid parameter')
    if msg:
        raise ValidationError(msg)
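A quick sketch of how this validator behaves, assuming VALID_SOLR_ARGS holds the argument names listed later in ckan_params_to_solr_args (the constant's real definition lives elsewhere in the extension):

VALID_SOLR_ARGS = ['q', 'fq', 'rows', 'start', 'sort', 'fl', 'df', 'facet.field']  # assumed

validate_solr_args({'q': 'budget', 'rows': 10})        # passes silently
validate_solr_args({'q': 'budget', 'bogus': 'value'})  # raises ValidationError({'bogus': ...})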
Example #4
def user_auth(context, data_dict):
    '''Authenticates a user

    You must be a system administrator to authenticate users.

    :param email: the email address of the user
    :type email: string
    :param password: the password of the user
    :type password: string

    :returns: the authenticated user, including their organisations
    :rtype: dictionary

    '''
    model = context['model']
    session = context['session']

    _check_access('user_auth', context, data_dict)

    email = data_dict.get('email')
    password = data_dict.get('password')

    if not (email and password):
        raise ValidationError(['email and password are both required'])

    users = User.by_email(email)
    user = users[0] if users else None

    if (user is None) or \
            (not user.is_active()) or \
            (not user.validate_password(password)):
        raise ValidationError(['There was a problem authenticating this user'])

    user_dict = model_dictize.user_dictize(user, context)

    ## Get the user's organisation list to return with the login details
    fOrgList = get_action('organization_list_for_user')
    user_dict['organisations'] = fOrgList(context, {'id': user.name})

    # DGU Hack: added encoding so we don't barf on unicode user names
    log.debug(
        'Authenticated user {name}'.format(name=user.name.encode('utf8')))
    return user_dict
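A hedged example of calling this action from extension code; the action name 'user_auth', the sysadmin context and the credentials below are all placeholders:

from ckan.plugins import toolkit

context = {'user': 'admin'}  # must resolve to a sysadmin for _check_access to pass
try:
    user = toolkit.get_action('user_auth')(context, {
        'email': 'jane@example.org',
        'password': 'secret',
    })
    organisations = user['organisations']
except toolkit.ValidationError:
    organisations = []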
Example #5
def featured_show(context, data_dict):
    data, errors = df.validate(data_dict, schema_get, context)

    if errors:
        raise ValidationError(errors)

    featuredn = db.Featurednumbers.get(id_fn=data['id_fn'])
    if featuredn is None:
        raise NotFound()

    return table_dictize(featuredn, context)
Example #6
def featured_show(context, data_dict):
    data, errors = df.validate(data_dict, schema_get, context)

    if errors:
        raise ValidationError(errors)

    featured = db.Featured.get(resource_view_id=data['resource_view_id'])
    if featured is None:
        raise NotFound()

    return table_dictize(featured, context)
Example #7
def inventory_entry_bulk_create(context, data_dict):
    model = context['model']
    schema = context['schema']
    session = context['session']

    organization = model.Group.get(context['organization_name'])
    inventory_entry_dict = {'group_id': organization.id}

    if not data_dict['field-name-input-0']:
        raise ValidationError(
            {'error': [_('Please add at least one inventory entry.')]})

    for inventory_entry_id in range(10):
        inventory_entry_name = data_dict['field-name-input-' +
                                         str(inventory_entry_id)]
        if not inventory_entry_name:
            break

        inventory_entry_dict['title'] = inventory_entry_name
        inventory_entry_dict['recurring_interval'] = data_dict[
            'field-recurring-input-' + str(inventory_entry_id)]
        inventory_entry_dict['is_recurring'] = (
            inventory_entry_dict['recurring_interval'] != '0')

        data, errors = navl_validate(inventory_entry_dict, schema, context)

        if errors:
            session.rollback()
            # TODO @palcu: i18n (message means 'Check entry number {0}')
            raise ValidationError({
                'error': [
                    _('Verificati intrarea cu numarul {0}.'.format(
                        inventory_entry_id + 1))
                ]
            })

        obj = table_dict_save(inventory_entry_dict, InventoryEntry, context)

    model.repo.commit()

    return table_dictize(obj, context)
Example #8
def webhook_show(context, data_dict):
    check_access("webhook_show", context, data_dict)

    data, errors = df.validate(data_dict, schema_get, context)
    if errors:
        raise ValidationError(errors)

    webhook = db.Webhook.get(id=data['id'])
    if webhook is None:
        raise NotFound()

    return table_dictize(webhook, context)
Example #9
def webhook_list(context, data_dict):
    check_access("webhook_list", context, data_dict)

    data, errors = df.validate(data_dict, schema_list, context)
    if errors:
        raise ValidationError(errors)

    webhooks = db.Webhook.find(topic=data['topic']).all()
    if not webhooks:
        raise NotFound()

    ids = [webhook.id for webhook in webhooks]

    return ids
Example #10
def get_next_cube_id(context, data_dict):
    """
    Returns the next available cube_id (without registering it).

    :param subjectCode: two-digit subject code (e.g. '24')
    :type subjectCode: str

    :return: next available cube_id
    :rtype: str

    :raises: ValidationError
    """
    subject_code = _get_or_bust(data_dict, 'subjectCode')
    if not re.match(r'^\d\d$', subject_code):
        raise ValidationError(_('Invalid subject_code'))

    lc = ckanapi.LocalCKAN(context=context)
    response = lc.action.package_search(
        q=('product_id_new:{subject_code}10* AND '
           'dataset_type:cube').format(subject_code=subject_code),
        sort='product_id_new desc',
        rows=1)

    if response['results']:
        result = response['results'][0]
        oldest_product_id = result['product_id_new']
        if oldest_product_id.endswith('9999'):
            # FIXME: This system is going to encounter numerous
            #        problems down the road.
            raise ValidationError(
                'All Cube IDs for this subject have been registered. '
                'Reusing IDs is in development.')

        return str(int(oldest_product_id) + 1)

    return '{subject_code}100001'.format(subject_code=subject_code)
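A worked sketch of the ID sequence this logic implies (the subject code and existing IDs are illustrative):

# No cubes registered yet under subject '24': fall through to the template value.
'{subject_code}100001'.format(subject_code='24')  # -> '24100001'

# Highest registered ID for the subject is '24100007': the next ID is an increment.
str(int('24100007') + 1)                          # -> '24100008'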
Example #11
def webhook_delete(context, data_dict):
    check_access("webhook_delete", context, data_dict)

    data, errors = df.validate(data_dict, schema_get, context)
    if errors:
        raise ValidationError(errors)

    webhook = db.Webhook.get(id=data['id'])
    if webhook is None:
        raise NotFound()

    session = context['session']
    session.delete(webhook)
    session.commit()

    return data['id']
Example #12
def changed_packages_activity_list_since(context, data_dict):
    '''Return the activity stream of all recently added or changed packages.

    :param since_time: starting date/time

    Limited to 31 records (configurable via the
    ckan.activity_list_hard_limit setting) but may be called repeatedly
    with the timestamp of the last record to collect all activities.

    :rtype: list of dictionaries
    '''

    since = get_or_bust(data_dict, 'since_time')
    try:
        since_time = isodate(since, None)
    except Invalid as e:
        raise ValidationError({'since_time': e.error})
Example #13
def register_cube(context, data_dict):
    """
    Register a new cube. Automatically populate
    subject code fields based on the provided parameters.

    :param subjectCode: two-digit subject code (e.g. 13)
    :type subjectCode: str
    :param productTitleEnglish: English title
    :type productTitleEnglish: unicode
    :param productTitleFrench: French title
    :type productTitleFrench: unicode

    :return: new package
    :rtype: dict

    :raises: ValidationError
    """
    subject_code = _get_or_bust(data_dict, 'subjectCode')
    title_en = _get_or_bust(data_dict, 'productTitleEnglish')
    title_fr = _get_or_bust(data_dict, 'productTitleFrench')

    if not len(subject_code) == 2:
        raise ValidationError('subjectCode not valid')

    lc = ckanapi.LocalCKAN(context=context)

    product_type_dict = lc.action.GetProductType(productType=CUBE_PRODUCT_TYPE)

    new_package = lc.action.package_create(
        # Old method simply used the product_id, whereas the modern edit
        # form validator uses cube-{product_id}, so let's go with that.
        owner_org='statcan',
        private=False,
        type=u'cube',
        product_type_code=product_type_dict['product_type_code'],
        subject_codes=[subject_code],
        title={
            'en': title_en,
            'fr': title_fr,
        },
        # '2' is "Draft" status, according to the ndm_publish_status
        # preset.
        last_publish_status_code='2')

    # Return our newly created package.
    return lc.action.GetCube(cubeId=new_package['product_id_new'])
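A possible invocation, assuming the action is registered as register_cube; the subject code and titles below are made up:

from ckan.plugins import toolkit

new_cube = toolkit.get_action('register_cube')({}, {
    'subjectCode': '13',
    'productTitleEnglish': u'Health indicators',
    'productTitleFrench': u'Indicateurs de la sante',
})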
Example #14
def ckan_params_to_solr_args(data_dict):
    u'''Transforms the parameters parsed by CKAN as data dict to the
    appropriate (py)solr query arguments.

    Every entry in the dictionary will be validated and transformed into the
    appropriate Solr arguments. Valid argument names are 'q', 'fq', 'rows',
    'start', 'sort', 'fl' and 'df'.

    A `ValidationError` will be raised if the data dict contains arguments that
    are not valid.

    :param data_dict: ``dict``, CKAN parameters as dictionary.

    :returns: ``dict`` with the transformed arguments valid for `pysolr`.
    '''
    solr_args = {}
    provided = {}
    provided.update(data_dict)
    facets = {}
    for argn in VALID_SOLR_ARGS:
        if provided.get(argn) is not None:
            if argn == 'facet.field':
                facets['facet.field'] = provided.pop(argn)
            else:
                solr_args[argn] = provided.pop(argn)

    q = []
    if solr_args.get('q'):
        _q = solr_args.get('q')
        if isinstance(_q, (str, unicode)):
            q.append(escape_str(_q))
        elif isinstance(_q, dict):
            for prop, val in _q.items():
                q.append('%s:%s' % (str(prop), escape_str(str(val))))
        else:
            raise ValidationError({'q': _('Invalid value type')})

    for prop, val in provided.items():
        q.append('%s:%s' % (str(prop), escape_str(str(val))))

    solr_args['q'] = ' AND '.join(sorted(q))
    if facets:
        solr_args.update(facets)
    return solr_args
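A worked example of the transformation, under the assumption that escape_str passes these plain values through unchanged:

data_dict = {
    'q': {'title': 'budget'},   # dict values become field:value pairs
    'rows': 10,                 # recognised Solr argument, copied as-is
    'organization': 'statcan',  # unrecognised key, folded into q
}
ckan_params_to_solr_args(data_dict)
# -> {'q': 'organization:statcan AND title:budget', 'rows': 10}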
Example #15
def update_cube(context, data_dict):
    cube_id = _get_or_bust(data_dict, 'cubeId')
    cube_data = _get_or_bust(data_dict, 'cubeData')

    lc = ckanapi.LocalCKAN(context=context)

    result = lc.action.package_search(
        q=('type:cube AND product_id_new:{cube_id}'.format(cube_id=cube_id)),
        rows=1)

    if not result['count']:
        raise ObjectNotFound('Cube not found')
    elif result['count'] > 1:
        raise ValidationError('More than one cube with given cubeid found')

    cube = result['results'][0]
    cube.update(cube_data)

    return lc.action.package_update(**cube)
Example #16
def webhook_create(context, data_dict):

    check_access("webhook_create", context, data_dict)

    data, errors = df.validate(data_dict, schema, context)

    if errors:
        raise ValidationError(errors)

    webhook = db.Webhook()
    webhook.address = data['address']
    webhook.topic = data['topic']
    webhook.user_id = model.User.get(context['user']).id
    webhook.save()

    session = context['session']
    session.add(webhook)
    session.commit()

    return webhook.id
Example #17
def inventory_entry_update(context, data_dict):
    # TODO @palcu: DRY this w/ inventory_entry_create
    model = context['model']
    schema = context['schema']
    session = context['session']

    organization = model.Group.get(context['organization_name'])
    data_dict['group_id'] = organization.id
    data_dict['is_recurring'] = (data_dict['recurring_interval'] != '0')

    data, errors = navl_validate(data_dict, schema, context)

    if errors:
        session.rollback()
        raise ValidationError(errors)

    obj = table_dict_save(data_dict, InventoryEntry, context)
    model.repo.commit()

    return table_dictize(obj, context)
Example #18
def featuredn_create(context, data_dict):
    data, errors = df.validate(data_dict, schema, context)

    if errors:
        raise ValidationError(errors)

    featuredn = db.Featurednumber()
    featuredn.id_featured_number = data['id_fn']
    featuredn.number = data.get('number', False)
    featuredn.unit = data.get('unit', False)

    #resource_id = model.ResourceView.get(featured.resource_view_id).resource_id
    #featured.package_id = model.Package.get(resource_id).package_id

    featuredn.save()

    session = context['session']
    session.add(featuredn)
    session.commit()

    return table_dictize(featuredn, context)
Example #19
def featured_create(context, data_dict):
    data, errors = df.validate(data_dict, schema, context)

    if errors:
        raise ValidationError(errors)

    featured = db.Featured()
    featured.resource_view_id = data['resource_view_id']
    featured.canonical = data.get('canonical', False)
    featured.homepage = data.get('homepage', False)

    resource_id = model.ResourceView.get(featured.resource_view_id).resource_id
    featured.package_id = model.Resource.get(resource_id).package_id

    featured.save()

    session = context['session']
    session.add(featured)
    session.commit()

    return table_dictize(featured, context)
Example #20
def get_subject(context, data_dict):
    """
    :param subjectCode: Subject Code (e.g. 13)
    :type subjectCode: str

    :return: English, French and code values for given subjectCode
    :rtype: dict
    """
    lc = ckanapi.LocalCKAN(context=context)
    subject_code = get_or_bust(data_dict, 'subjectCode')

    response = lc.action.package_search(
        q='dataset_type:subject AND subject_code:{value}'.format(
            value=subject_code
        )
    )

    if not response['count']:
        raise ObjectNotFound(
            'No subject found with subject code {subject_code}'.format(
                subject_code=subject_code
            )
        )
    elif response['count'] > 1:
        raise ValidationError((
            'Duplicate SubjectCodes have been entered '
            'in CKAN {subject_code}'
        ).format(subject_code=subject_code))
    else:
        r = response['results'][0]
        return {
            'title': r['title'],
            'subject_code': r['subject_code']
        }
Example #21
def get_cube_list_by_subject(context, data_dict):
    """
    Return a dict with all Cube Ids and French/English titles based on a
    provided SubjectCode.

    :param subjectCode: two-digit subject code (e.g. 13)
    :type subjectCode: str

    :return: registered cubes for the SubjectCode and their
             French/English titles
    :rtype: list of dicts

    :raises: ValidationError, ObjectNotFound
    """
    subject_code = _get_or_bust(data_dict, 'subjectCode')

    if len(subject_code) != 2:
        raise ValidationError('invalid subjectcode')

    lc = ckanapi.LocalCKAN(context=context)
    result = lc.action.package_search(
        q=('dataset_type:cube AND '
           '(subject_codes:{code} OR '
           'subject_codes:{code}*)').format(code=subject_code),
        rows=1000)

    count = result['count']
    if not count:
        raise ObjectNotFound(
            'Found no cubes with subject code {subject_code}'.format(
                subject_code=subject_code))
    else:
        return [{
            u'title': r['title'],
            u'cube_id': r['product_id_new']
        } for r in result['results']]
Example #22
    def __init__(self, resource):

        config_mimetype_guess = config.get('ckan.mimetype_guess', 'file_ext')

        self.filename = None
        self.mimetype = None

        raw_link_path = resource.get('link_path', resource.get('url', ''))

        if raw_link_path.startswith("\\"):
            link_path = PureWindowsPath(raw_link_path)
        else:
            link_path = PurePath(raw_link_path)

        self.mount_path = None
        self.clear = resource.pop('clear_upload', None)

        self.filesize = 0  # bytes
        self.filename = munge.munge_filename(link_path.name)
        self.link_path = link_path.as_posix()

        # Construct mountpoint dictionary
        mountpoints = dict(
            zip(
                shlex.split(
                    config.get('ckanext.linked_storage.net_paths', None)),
                shlex.split(
                    config.get('ckanext.linked_storage.mountpoints', None))))

        log.debug('Available mountpoints: %r', mountpoints)

        for m in mountpoints:
            if self.link_path.upper().startswith(m.upper()):
                self.mount_path = mountpoints[m] + self.link_path[len(m):]

        if not self.mount_path:
            raise ValidationError(
                'Unable to locate file via the known mount points')

        resource['url_type'] = 'upload'  # appear to CKAN as an upload
        resource['url'] = self.filename  # use filename just like an uploaded file
        resource['link_path'] = self.link_path
        resource['last_modified'] = datetime.datetime.fromtimestamp(
            os.path.getmtime(self.mount_path))

        with open(self.mount_path, 'rb') as f:

            f.seek(0, os.SEEK_END)
            self.filesize = f.tell()
            # go back to the beginning of the file buffer
            f.seek(0, os.SEEK_SET)

            # check if the mimetype failed from guessing with the url
            if not self.mimetype and config_mimetype_guess == 'file_ext':
                self.mimetype = mimetypes.guess_type(self.filename)[0]

            if not self.mimetype and config_mimetype_guess == 'file_contents':
                try:
                    self.mimetype = magic.from_buffer(f.read(), mime=True)
                    f.seek(0, os.SEEK_SET)
                except IOError as e:
                    # Not that important if call above fails
                    self.mimetype = None
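The mountpoint lookup above zips two shlex-parsed configuration options positionally; a minimal sketch with hypothetical values (the real paths come from the CKAN config file):

import shlex

net_paths = '//fileserver/public "//file server/archive"'  # quoting protects spaces
mount_points = '/mnt/public /mnt/archive'

mapping = dict(zip(shlex.split(net_paths), shlex.split(mount_points)))
# {'//fileserver/public': '/mnt/public', '//file server/archive': '/mnt/archive'}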
Example #23
def resource_create(context, data_dict):
    '''Wraps the original CKAN resource creation
    to handle XLS organogram uploads and split them into
    component CSVs.

    Passes all non-organogram resources through to CKAN
    as normal.

    See ckan.logic.action.create.resource_create for
    documentation on the original function.
    '''
    mimetype = mimetypes.guess_type(data_dict['url'])[0]
    log.debug("Mimetype: %s" % mimetype)

    if mimetype in (
            'application/vnd.ms-excel',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    ):
        log.debug("Excel file detected")

        package_id = get_or_bust(data_dict, 'package_id')

        pkg_dict = get_action('package_show')(dict(context,
                                                   return_type='dict'), {
                                                       'id': package_id
                                                   })

        organogram_ids = {
            '538b857a-64ba-490e-8440-0e32094a28a7',  # Local authority
            'd3c0b23f-6979-45e4-88ed-d2ab59b005d0',  # Departmental
        }

        if pkg_dict.get('schema-vocabulary') in organogram_ids:
            log.debug("Organogram detected")

            file_handle = data_dict['upload'].file

            errors, warnings, senior_csv, junior_csv = create_organogram_csvs(
                file_handle)

            if errors:
                context['session'].rollback()

                error_items = [(str(i + 1), error)
                               for i, error in enumerate(errors)]
                error_summary = OrderedDict(error_items)

                raise ValidationError(errors, error_summary)
            else:
                log.debug("Valid organogram Excel file found")
                timestamp = datetime.utcnow()
                timestamp_str = timestamp.strftime("%Y-%m-%dT%H-%M-%SZ")

                senior_resource = _create_csv_resource('Senior', senior_csv,
                                                       data_dict.copy(),
                                                       context, timestamp_str)
                junior_resource = _create_csv_resource('Junior', junior_csv,
                                                       data_dict.copy(),
                                                       context, timestamp_str)

                return senior_resource

    log.debug("Passing args through to the CKAN resource_create")
    return resource_create_core(context, data_dict)
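The numbered error summary passed to ValidationError above is keyed by 1-based position; a small illustration with hypothetical spreadsheet errors:

from collections import OrderedDict

errors = ['Missing senior sheet', 'Blank post reference in row 12']  # made-up examples
error_summary = OrderedDict(
    (str(i + 1), error) for i, error in enumerate(errors))
# OrderedDict([('1', 'Missing senior sheet'), ('2', 'Blank post reference in row 12')])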
Example #24
def user_create(context, data_dict):
    '''Create a new user.

    You must be authorized to create users.

    :param name: the name of the new user, a string between 2 and 100
        characters in length, containing only lowercase alphanumeric
        characters, ``-`` and ``_``
    :type name: string
    :param email: the email address for the new user
    :type email: string
    :param password: the password of the new user, a string of at least 4
        characters
    :type password: string
    :param id: the id of the new user (optional)
    :type id: string
    :param fullname: the full name of the new user (optional)
    :type fullname: string
    :param about: a description of the new user (optional)
    :type about: string
    :param openid: (optional)
    :type openid: string

    :returns: the newly created user
    :rtype: dictionary

    '''
    model = context['model']
    schema = context.get('schema') or ckan.logic.schema.default_user_schema()
    session = context['session']

    _check_access('user_create', context, data_dict)

    data, errors = _validate(data_dict, schema, context)

    if errors:
        session.rollback()
        raise ValidationError(errors)

    # user schema prevents non-sysadmins from providing password_hash
    if 'password_hash' in data:
        data['_password'] = data.pop('password_hash')

    user = model_save.user_dict_save(data, context)

    # Flush the session to cause user.id to be initialised, because
    # activity_create() (below) needs it.
    session.flush()

    activity_create_context = {
        'model': model,
        'user': context['user'],
        'defer_commit': True,
        'ignore_auth': True,
        'session': session
    }
    activity_dict = {
        'user_id': user.id,
        'object_id': user.id,
        'activity_type': 'new user',
    }
    get_action('activity_create')(activity_create_context, activity_dict)

    if not context.get('defer_commit'):
        model.repo.commit()

    # A new context is required for dictizing the newly constructed user in
    # order that all the new user's data is returned, in particular, the
    # api_key.
    #
    # The context is copied so as not to clobber the caller's context dict.
    user_dictize_context = context.copy()
    user_dictize_context['keep_apikey'] = True
    user_dictize_context['keep_email'] = True
    user_dict = model_dictize.user_dictize(user, user_dictize_context)

    context['user_obj'] = user
    context['id'] = user.id

    model.Dashboard.get(user.id)  # Create dashboard for user.

    # DGU Hack: added encoding so we don't barf on unicode user names
    log.debug('Created user {name}'.format(name=user.name.encode('utf8')))
    return user_dict
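A hedged sketch of invoking this core action from extension code; the account details are placeholders, and ignore_auth simply bypasses the user_create authorization check:

from ckan.plugins import toolkit

new_user = toolkit.get_action('user_create')(
    {'ignore_auth': True},
    {'name': 'jane_doe', 'email': 'jane@example.org', 'password': 'pass1234'})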