Example No. 1
    def setup_class(cls):
        wsgiapp = middleware.make_app(config['global_conf'], **config)
        cls.app = paste.fixture.TestApp(wsgiapp)
        if not tests.is_datastore_supported():
            raise nose.SkipTest("Datastore not supported")
        p.load('datastore')
        ctd.CreateTestData.create()
        cls.sysadmin_user = model.User.get('testsysadmin')
        cls.normal_user = model.User.get('annafan')
        resource = model.Package.get('annakarenina').resources[0]
        cls.data = {
            'resource_id': resource.id,
            'force': True,
            'aliases': 'books',
            'fields': [
                {'id': u'b\xfck', 'type': 'text'},
                {'id': 'author', 'type': 'text'},
                {'id': 'published'},
                {'id': u'characters', u'type': u'_text'},
                {'id': 'random_letters', 'type': 'text[]'},
            ],
            'records': [
                {
                    u'b\xfck': 'annakarenina',
                    'author': 'tolstoy',
                    'published': '2005-03-01',
                    'nested': ['b', {'moo': 'moo'}],
                    u'characters': [u'Princess Anna', u'Sergius'],
                    'random_letters': ['a', 'e', 'x'],
                },
                {
                    u'b\xfck': 'warandpeace',
                    'author': 'tolstoy',
                    'nested': {'a': 'b'},
                    'random_letters': [],
                },
            ],
        }
        postparams = '%s=1' % json.dumps(cls.data)
        auth = {'Authorization': str(cls.sysadmin_user.apikey)}
        res = cls.app.post('/api/action/datastore_create',
                           params=postparams,
                           extra_environ=auth)
        res_dict = json.loads(res.body)
        assert res_dict['success'] is True

        engine = db.get_write_engine()
        cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))
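
A minimal sketch of the equivalent direct action call (an illustration, not part of the original test): it assumes the datastore plugin is loaded and reuses cls.data and cls.sysadmin_user from the setup above.

import ckan.plugins.toolkit as toolkit

# Equivalent to the POST to /api/action/datastore_create above, but without
# the HTTP layer: the acting user is passed in the context instead of an
# Authorization header.
result = toolkit.get_action('datastore_create')(
    {'user': cls.sysadmin_user.name}, cls.data)
assert result['resource_id'] == cls.data['resource_id']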
Example No. 2
    def _read_connection_has_correct_privileges(self):
        ''' Returns True if the right permissions are set for the read-only
        user. A temporary table is created by the write user to test that
        the read-only user cannot write to it.
        '''
        write_connection = db.get_write_engine().connect()
        read_connection_user = sa_url.make_url(self.read_url).username

        drop_foo_sql = u'DROP TABLE IF EXISTS _foo'

        write_connection.execute(drop_foo_sql)

        try:
            write_connection.execute(u'CREATE TEMP TABLE _foo ()')
            for privilege in ['INSERT', 'UPDATE', 'DELETE']:
                test_privilege_sql = u"SELECT has_table_privilege(%s, '_foo', %s)"
                have_privilege = write_connection.execute(
                    test_privilege_sql,
                    (read_connection_user, privilege)).first()[0]
                if have_privilege:
                    return False
        finally:
            write_connection.execute(drop_foo_sql)
            write_connection.close()
        return True
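
A hedged usage sketch for the check above; the surrounding start-up code and the error message are assumptions, not CKAN's actual plugin wiring.

# Hypothetical start-up guard; the message text is illustrative only.
if not self._read_connection_has_correct_privileges():
    raise Exception(
        'The read-only datastore user must not have INSERT, UPDATE or '
        'DELETE privileges on tables created by the write user.')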
Example No. 3
def datastore_run_triggers(context, data_dict):
    ''' Update each record with its triggers.

    The datastore_run_triggers API action allows you to re-apply existing
    triggers to an existing DataStore resource.

    :param resource_id: resource id that the data is going to be stored under.
    :type resource_id: string

    **Results:**

    :returns: The number of rows updated (every row in the table).
    :rtype: int

    '''
    res_id = data_dict['resource_id']
    p.toolkit.check_access('datastore_trigger_each_row', context, data_dict)

    connection = db.get_write_engine().connect()

    sql = sqlalchemy.text(u'''update {0} set _id=_id '''.format(
        datastore_helpers.identifier(res_id)))
    try:
        results = connection.execute(sql)
    except sqlalchemy.exc.DatabaseError as err:
        message = err.args[0].split('\n')[0].decode('utf8')
        raise p.toolkit.ValidationError(
            {u'records': [message.split(u') ', 1)[-1]]})
    return results.rowcount
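
A minimal usage sketch for this action; the user name and resource id are hypothetical, and the caller must pass the datastore_trigger_each_row authorization check.

import ckan.plugins.toolkit as toolkit

# Re-fire the existing triggers on every row of the resource and report how
# many rows were touched.
count = toolkit.get_action('datastore_run_triggers')(
    {'user': 'admin-user'},             # hypothetical acting user
    {'resource_id': 'my-resource-id'})  # hypothetical resource id
print('%d rows updated by the triggers' % count)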
Example No. 4
    def setup_class(cls):
        if not tests.is_datastore_supported():
            raise nose.SkipTest("Datastore not supported")
        p.load('datastore')
        ctd.CreateTestData.create()
        cls.sysadmin_user = model.User.get('testsysadmin')
        cls.normal_user = model.User.get('annafan')
        set_url_type(
            model.Package.get('annakarenina').resources, cls.sysadmin_user)
        resource = model.Package.get('annakarenina').resources[0]
        cls.data = {
            'resource_id': resource.id,
            'fields': [
                {'id': u'b\xfck', 'type': 'text'},
                {'id': 'author', 'type': 'text'},
                {'id': 'nested', 'type': 'json'},
                {'id': 'characters', 'type': 'text[]'},
                {'id': 'published'},
            ],
            'primary_key': u'b\xfck',
            'records': [
                {
                    u'b\xfck': 'annakarenina',
                    'author': 'tolstoy',
                    'published': '2005-03-01',
                    'nested': ['b', {'moo': 'moo'}],
                },
                {
                    u'b\xfck': 'warandpeace',
                    'author': 'tolstoy',
                    'nested': {'a': 'b'},
                },
            ],
        }
        postparams = '%s=1' % json.dumps(cls.data)
        auth = {'Authorization': str(cls.sysadmin_user.apikey)}
        res = cls.app.post('/api/action/datastore_create',
                           params=postparams,
                           extra_environ=auth)
        res_dict = json.loads(res.body)
        assert res_dict['success'] is True

        engine = db.get_write_engine()
        cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))
Example No. 5
def _is_legacy_mode(config):
    '''
        Decides whether the DataStore should run in legacy mode.

        Returns True if `ckan.datastore.read_url` is not set in the provided
        config object or CKAN is running on Postgres older than 9.0.
    '''
    engine = db.get_write_engine()
    connection = engine.connect()

    return (not config.get('ckan.datastore.read_url')
            or not db._pg_version_is_at_least(connection, '9.0'))
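
A small illustration of the first condition checked above (a sketch under the assumption that a write engine is reachable; the connection URL is hypothetical):

# With no ckan.datastore.read_url the function short-circuits to True,
# regardless of the Postgres version (it still opens a write connection, so
# a reachable database is required).
config_without_read_url = {
    'ckan.datastore.write_url':
        'postgresql://ckan:pass@localhost/datastore',  # hypothetical URL
}
assert _is_legacy_mode(config_without_read_url)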
Example No. 6
    def setup_class(cls):
        wsgiapp = middleware.make_app(config['global_conf'], **config)
        cls.app = paste.fixture.TestApp(wsgiapp)
        if not tests.is_datastore_supported():
            raise nose.SkipTest("Datastore not supported")
        p.load('datastore')
        p.load('datapusher')
        ctd.CreateTestData.create()
        cls.sysadmin_user = model.User.get('testsysadmin')
        cls.normal_user = model.User.get('annafan')
        engine = db.get_write_engine()
        cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))
        set_url_type(
            model.Package.get('annakarenina').resources, cls.sysadmin_user)
Example No. 7
    def setup_class(cls):
        wsgiapp = middleware.make_app(config['global_conf'], **config)
        cls.app = paste.fixture.TestApp(wsgiapp)
        if not tests.is_datastore_supported():
            raise nose.SkipTest("Datastore not supported")
        p.load('datastore')
        p.load('datapusher')
        p.load('test_datapusher_plugin')

        resource = factories.Resource(url_type='datastore')
        cls.dataset = factories.Dataset(resources=[resource])

        cls.sysadmin_user = factories.User(name='testsysadmin', sysadmin=True)
        cls.normal_user = factories.User(name='annafan')
        engine = db.get_write_engine()
        cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))
Example No. 8
    def setup_class(cls):

        if not config.get('ckan.datastore.read_url'):
            raise nose.SkipTest('Datastore runs on legacy mode, skipping...')

        engine = db.get_write_engine()
        cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))

        datastore_test_helpers.clear_db(cls.Session)

        create_tables = [
            u'CREATE TABLE test_a (id_a text)',
            u'CREATE TABLE test_b (id_b text)',
            u'CREATE TABLE "TEST_C" (id_c text)',
            u'CREATE TABLE test_d ("α/α" integer)',
        ]
        for create_table_sql in create_tables:
            cls.Session.execute(create_table_sql)
Example No. 9
    def setup_class(cls):
        if not tests.is_datastore_supported():
            raise nose.SkipTest("Datastore not supported")
        p.load('datastore')
        ctd.CreateTestData.create()
        cls.sysadmin_user = model.User.get('testsysadmin')
        cls.normal_user = model.User.get('annafan')
        resource = model.Package.get('annakarenina').resources[0]
        cls.data = {
            'resource_id': resource.id,
            'aliases': u'b\xfck2',
            'fields': [{'id': 'book', 'type': 'text'},
                       {'id': 'author', 'type': 'text'},
                       {'id': 'rating with %', 'type': 'text'}],
            'records': [{'book': 'annakarenina', 'author': 'tolstoy',
                         'rating with %': '90%'},
                        {'book': 'warandpeace', 'author': 'tolstoy',
                         'rating with %': '42%'}]
        }

        engine = db.get_write_engine()
        cls.Session = orm.scoped_session(orm.sessionmaker(bind=engine))
        set_url_type(
            model.Package.get('annakarenina').resources, cls.sysadmin_user)
Example No. 10
def datastore_search(context, data_dict):
    '''Search a DataStore resource.

    The datastore_search action allows you to search data in a resource.
    DataStore resources that belong to a private CKAN resource can only be
    read by you if you have access to the CKAN resource and send the
    appropriate authorization.

    :param resource_id: id or alias of the resource to be searched against
    :type resource_id: string
    :param filters: matching conditions to select, e.g. {"key1": "a", "key2": "b"} (optional)
    :type filters: dictionary
    :param q: full text query. If it's a string, it'll search on all fields on
              each row. If it's a dictionary as {"key1": "a", "key2": "b"},
              it'll search on each specific field (optional)
    :type q: string or dictionary
    :param distinct: return only distinct rows (optional, default: false)
    :type distinct: bool
    :param plain: treat as plain text query (optional, default: true)
    :type plain: bool
    :param language: language of the full text query (optional, default: english)
    :type language: string
    :param limit: maximum number of rows to return (optional, default: 100)
    :type limit: int
    :param offset: offset this number of rows (optional)
    :type offset: int
    :param fields: fields to return (optional, default: all fields in original order)
    :type fields: list or comma separated string
    :param sort: comma separated field names with ordering
                 e.g.: "fieldname1, fieldname2 desc"
    :type sort: string

    Setting the ``plain`` flag to false enables the entire PostgreSQL `full text search query language`_.

    A listing of all available resources can be found at the alias ``_table_metadata``.

    .. _full text search query language: http://www.postgresql.org/docs/9.1/static/datatype-textsearch.html#DATATYPE-TSQUERY

    If you need to download the full resource, read :ref:`dump`.

    **Results:**

    The result of this action is a dictionary with the following keys:

    :rtype: A dictionary with the following keys
    :param fields: fields/columns and their extra metadata
    :type fields: list of dictionaries
    :param offset: query offset value
    :type offset: int
    :param limit: query limit value
    :type limit: int
    :param filters: query filters
    :type filters: list of dictionaries
    :param total: number of total matching records
    :type total: int
    :param records: list of matching results
    :type records: list of dictionaries

    '''
    schema = context.get('schema', dsschema.datastore_search_schema())
    data_dict, errors = _validate(data_dict, schema, context)
    if errors:
        raise p.toolkit.ValidationError(errors)

    res_id = data_dict['resource_id']

    resources_sql = sqlalchemy.text(u'''SELECT alias_of FROM "_table_metadata"
                                        WHERE name = :id''')
    # XXX: write connection because of private tables, we
    # should be able to make this read once we stop using pg
    # permissions enforcement
    results = db.get_write_engine().execute(resources_sql, id=res_id)

    # Resource only has to exist in the datastore (because it could be an alias)
    if not results.rowcount > 0:
        raise p.toolkit.ObjectNotFound(
            p.toolkit._('Resource "{0}" was not found.'.format(res_id)))

    if data_dict['resource_id'] not in WHITELISTED_RESOURCES:
        # Replace potential alias with real id to simplify access checks
        resource_id = results.fetchone()[0]
        if resource_id:
            data_dict['resource_id'] = resource_id

        p.toolkit.check_access('datastore_search', context, data_dict)

    result = db.search(context, data_dict)
    result.pop('id', None)
    return result
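
A minimal usage sketch for datastore_search; the resource id, field name and query terms are hypothetical.

import ckan.plugins.toolkit as toolkit

result = toolkit.get_action('datastore_search')(
    {},                                  # context; authorization is checked inside
    {'resource_id': 'my-resource-id',    # id or alias of the resource
     'filters': {'author': 'tolstoy'},   # exact-match conditions
     'q': 'war',                         # full text query across all fields
     'limit': 5})
for record in result['records']:
    print(record)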
Example No. 11
def datastore_upsert(context, data_dict):
    '''Updates or inserts into a table in the DataStore

    The datastore_upsert API action allows you to add or edit records to
    an existing DataStore resource. In order for the *upsert* and *update*
    methods to work, a unique key has to be defined via the datastore_create
    action. The available methods are:

    *upsert*
        Update if record with same key already exists, otherwise insert.
        Requires unique key.
    *insert*
        Insert only. This method is faster than upsert, but will fail if any
        inserted record matches an existing one. Does *not* require a unique
        key.
    *update*
        Update only. An exception will occur if the key that should be updated
        does not exist. Requires unique key.


    :param resource_id: resource id that the data is going to be stored under.
    :type resource_id: string
    :param force: set to True to edit a read-only resource
    :type force: bool (optional, default: False)
    :param records: the data, e.g.: [{"dob": "2005", "some_stuff": ["a","b"]}] (optional)
    :type records: list of dictionaries
    :param method: the method to use to put the data into the datastore.
                   Possible options are: upsert, insert, update (optional, default: upsert)
    :type method: string

    **Results:**

    :returns: The modified data object.
    :rtype: dictionary

    '''
    schema = context.get('schema', dsschema.datastore_upsert_schema())
    records = data_dict.pop('records', None)
    data_dict, errors = _validate(data_dict, schema, context)
    if records:
        data_dict['records'] = records
    if errors:
        raise p.toolkit.ValidationError(errors)

    p.toolkit.check_access('datastore_upsert', context, data_dict)

    if not data_dict.pop('force', False):
        resource_id = data_dict['resource_id']
        _check_read_only(context, resource_id)

    res_id = data_dict['resource_id']
    resources_sql = sqlalchemy.text(u'''SELECT 1 FROM "_table_metadata"
                                        WHERE name = :id AND alias_of IS NULL'''
                                    )
    results = db.get_write_engine().execute(resources_sql, id=res_id)
    res_exists = results.rowcount > 0

    if not res_exists:
        raise p.toolkit.ObjectNotFound(
            p.toolkit._(u'Resource "{0}" was not found.'.format(res_id)))

    result = db.upsert(context, data_dict)
    result.pop('id', None)
    return result
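
A minimal usage sketch for datastore_upsert; the user, resource id and records are hypothetical, and method='upsert' requires that the resource was created with a primary key (as in datastore_create).

import ckan.plugins.toolkit as toolkit

result = toolkit.get_action('datastore_upsert')(
    {'user': 'editor-user'},                 # hypothetical acting user
    {'resource_id': 'my-resource-id',        # hypothetical resource id
     'method': 'upsert',                     # one of: upsert, insert, update
     'force': True,                          # set to True to edit a read-only resource
     'records': [{u'b\xfck': 'annakarenina', 'author': 'tolstoy'}]})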
Example No. 12
    def setup_class(cls):
        engine = db.get_write_engine()
        rebuild_all_dbs(orm.scoped_session(orm.sessionmaker(bind=engine)))
        super(DatastoreFunctionalTestBase, cls).setup_class()