Example #1
 def test_dataset_was_harvested(self):
     fb_dataset_dict, source, job = self._harvester_setup(
         FISBROKER_HARVESTER_CONFIG)
     fb_dataset = Package.get(fb_dataset_dict.get('name'))
     assert dataset_was_harvested(fb_dataset)
     non_fb_dataset_dict = ckan_factories.Dataset()
     non_fb_dataset = Package.get(non_fb_dataset_dict.get('name'))
     assert not dataset_was_harvested(non_fb_dataset)
Example #2
 def test_harvester_for_package(self):
     fb_dataset_dict, source, job = self._harvester_setup(
         FISBROKER_HARVESTER_CONFIG)
     fb_dataset = Package.get(fb_dataset_dict.get('name'))
     assert harvester_for_package(fb_dataset) is source
     non_fb_dataset_dict = ckan_factories.Dataset()
     non_fb_dataset = Package.get(non_fb_dataset_dict.get('name'))
     assert harvester_for_package(non_fb_dataset) is None
Example #3
    def test_reimport_invalid_dataset_triggers_deletion(self):
        """If a previously harvested dataset is reimported, and the
           reimport results in an ERROR_DURING_IMPORT, the package should
           have its state changed to deleted."""

        fb_dataset_dict, source, job = self._harvester_setup(
            FISBROKER_HARVESTER_CONFIG, fb_guid=INVALID_GUID)
        job.status = u'Finished'
        job.save()
        package_update(self.context, fb_dataset_dict)
        package_id = fb_dataset_dict['id']

        response = self.app.get(
            url="/api/harvest/reimport?id={}".format(package_id),
            headers={'Accept': 'application/json'},
            extra_environ={
                'REMOTE_USER': self.context['user'].encode('ascii')
            })

        # assert successful HTTP response
        _assert_equal(response.status_int, 200)
        content = json.loads(response.body)

        # assert failure marker in response JSON
        assert not content['success']
        _assert_equal(content['error']['code'], ERROR_DURING_IMPORT)
        package = Package.get(package_id)
        _assert_equal(package.state, 'deleted')
Example #4
    def test_api_create_dataset(self):
        tests.call_action_api(self.app,
                              'package_create',
                              status=409,
                              name='test-name-1',
                              title="test-title-1",
                              content_type="test1,test2",
                              license_id="other",
                              notes="test notes",
                              tag_string="tag1,tag2",
                              apikey=self.sysadmin.apikey)

        tests.call_action_api(self.app,
                              'package_create',
                              status=200,
                              name='test-name-2',
                              title="test-title-2",
                              content_type="test1,test2",
                              license_id="other",
                              notes="test notes",
                              tag_string="tag1,tag2",
                              collection_type="Open Data",
                              apikey=self.sysadmin.apikey)

        test_dataset = Package.get('test-name-2')
        self.assert_equal(test_dataset.maintainer, "")
        self.assert_equal(test_dataset.maintainer_email, "")

        if not asbool(config.get('ckanext.ytp.auto_author', False)):
            self.assert_equal(test_dataset.author, "")
            self.assert_equal(test_dataset.author_email, "")
Example #5
    def test_successful_reimport(self):
        '''If all is good and the FIS-Broker service returns a record,
           return an HTTP 200.'''

        fb_dataset_dict, source, job = self._harvester_setup(
            FISBROKER_HARVESTER_CONFIG)
        job.status = u'Finished'
        job.save()
        package_update(self.context, fb_dataset_dict)
        package_id = fb_dataset_dict['id']
        package = Package.get(package_id)
        old_title = package.title
        response = self.app.get(
            url="/api/harvest/reimport?id={}".format(package_id),
            headers={'Accept': 'application/json'},
            extra_environ={
                'REMOTE_USER': self.context['user'].encode('ascii')
            })
        # assert successful HTTP response
        _assert_equal(response.status_int, 200)
        content = json.loads(response.body)
        # assert success marker in response JSON
        assert content['success']
        # assert that title has changed to the correct value (i.e., the reimport has actually happened)
        _assert_equal(
            package.title,
            u"Nährstoffversorgung des Oberbodens 2015 (Umweltatlas) - [WFS]")
        _assert_not_equal(package.title, old_title)
Example #6
 def fill_license_value(key, data, errors, context):
     # NOTE: 'name' is not defined in this snippet; it is presumably the
     # license attribute to copy, supplied by an enclosing scope.
     license_id = data.get(('license_id', ))
     value = None
     if license_id:
         license = Package.get_license_register().get(license_id)
         if license:
             value = license[name]
     data[key] = value
Example #7
 def fill_license_value(key, data, errors, context):
     license_id = data.get(('license_id',))
     value = None
     if license_id:
         license = Package.get_license_register().get(license_id)
         if license:
             value = license[name]
     data[key] = value
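Examples #6 and #7 read a `name` variable that is never defined inside the function, so as written they would raise a NameError; in context it is presumably injected by an enclosing scope. Below is a minimal sketch of one way to supply it, assuming a hypothetical factory (`make_fill_license_value` and the 'title' attribute are illustrative, not part of the original code):

def make_fill_license_value(name):
    """Build a converter that copies the license attribute `name`
    (e.g. 'title' or 'url') into the field being processed."""
    def fill_license_value(key, data, errors, context):
        license_id = data.get(('license_id',))
        value = None
        if license_id:
            license = Package.get_license_register().get(license_id)
            if license:
                value = license[name]
        data[key] = value
    return fill_license_value

# hypothetical usage in a dataset schema:
# schema['license_title'] = [make_fill_license_value('title')]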
Example #8
def package_show(context, data_dict):
    package_dict = get.package_show(context, data_dict)
    package = Package.get(package_dict['id'])
    package_dict['ratings'] = package.get_average_rating()
    # if package_dict['type'] == 'dataset':
    #     send_log(context, package_dict, 'Dataset metadata accessed',
    #              'DatasetMetadataAccessed')
    return package_dict
Example #9
def package_delete(context, data_dict):
    model = context['model']
    delete.package_delete(context, data_dict)
    package = Package.get(data_dict['id'])
    package.purge()
    model.repo.commit_and_remove()
    send_dataset_log(context, data_dict, 'Dataset removed',
                     'DatasetRemoved')
    delete_de(data_dict)
    return None
Example #10
    def test_create_dataset(self):
        context = self._create_context()
        data_dict = {'name': 'test_dataset_1', 'title': 'test_title', 'notes': "test_notes", 'license_id': "licence_id",
                     'content_type': "content_type_test", 'tag_string': "tag1,tag2", 'collection_type': 'Open Data',
                     'copyright_notice': 'test_notice'}

        result = toolkit.get_action('package_create')(context, data_dict)

        self.assert_equal(result['name'], 'test_dataset_1')
        test_dataset = Package.get('test_dataset_1')

        self.assert_equal(test_dataset.extras['copyright_notice'], 'test_notice')
Example #11
def get_package_groups(package_id):
    context = {
        'model': model,
        'session': model.Session,
        'for_view': True,
        'use_cache': False
    }

    data_dict = {'all_fields': True, 'include_extras': True}

    groups = get_action('group_list')(context, data_dict)
    group_list = []

    try:
        pkg_obj = Package.get(package_id)
        pkg_group_ids = set(group['id'] for group in group_list_dictize(
            pkg_obj.get_groups('group', None), context))

        group_list = [
            group for group in groups if group['id'] in pkg_group_ids
        ]

        if c.user:
            context = {
                'model': model,
                'session': model.Session,
                'user': c.user,
                'for_view': True,
                'auth_user_obj': c.userobj,
                'use_cache': False,
                'is_member': True
            }

            data_dict = {'id': package_id}
            users_groups = get_action('group_list_authz')(context, data_dict)

            user_group_ids = set(group['id'] for group in users_groups)

            for group in group_list:
                group['user_member'] = (group['id'] in user_group_ids)

    except (NotFound):
        abort(404, _('Dataset not found'))

    return group_list
Example #12
def get_package_dict(id, use_get_action=True):
    """
    Return package dict.
    """
    if len(id) == 0:
        id = data_qld_helpers.get_request_path().split('/')[-1]

    try:
        if use_get_action:
            return get_action('package_show')({}, {'name_or_id': id})
        else:
            pkg = Package.get(id)
            if pkg:
                return pkg.as_dict()
    except Exception as e:
        log.error(str(e))

    return {}
Example #13
    def resolve_package(self, info, id):
        # if limit > 500:
        #     raise GraphQLError('The max limit value is 500')

        pkg_dict = logic.get_action('package_show')(None, {'id': id})

        if not pkg_dict:
            raise GraphQLError(f"The package with id '{id}' doesn't exist")

        # default to empty lists so the checks below don't fail when the
        # package has no relationships
        as_object = []
        as_subject = []

        if pkg_dict['relationships_as_object']:
            as_object = [
                pkg['__extras']['subject_package_id']
                for pkg in pkg_dict['relationships_as_object']
            ]

        if pkg_dict['relationships_as_subject']:
            as_subject = [
                pkg['__extras']['object_package_id']
                for pkg in pkg_dict['relationships_as_subject']
            ]

        sub_pkgs = {}
        if as_object:
            for id in as_object:
                sub_pkgs[id] = logic.get_action('package_show')(None, {
                    'id': id
                })

        return [
            Package(id=pkg_dict['id'],
                    name=pkg_dict['name'],
                    title=pkg_dict['title'],
                    url=pkg_dict['url'],
                    description=pkg_dict['notes'],
                    private=pkg_dict['private'],
                    pkg_type=pkg_dict['type'],
                    state=pkg_dict['state'],
                    created_date=pkg_dict['metadata_created'],
                    modified_date=pkg_dict['metadata_modified'],
                    license_id=pkg_dict['license_id'],
                    owner_org=pkg_dict['owner_org'],
                    child=graphene.Field(Child, default_value=sub_pkgs))
        ]
Example #14
    def test_create_dataset(self):
        context = self._create_context()
        data_dict = {
            'name': 'test_dataset_1',
            'title': 'test_title',
            'notes': "test_notes",
            'license_id': "licence_id",
            'content_type': "content_type_test",
            'tag_string': "tag1,tag2",
            'collection_type': 'Open Data',
            'copyright_notice': 'test_notice'
        }

        result = toolkit.get_action('package_create')(context, data_dict)

        self.assert_equal(result['name'], 'test_dataset_1')
        test_dataset = Package.get('test_dataset_1')

        self.assert_equal(test_dataset.extras['copyright_notice'],
                          'test_notice')
Example #15
    def test_api_create_dataset(self):
        tests.call_action_api(self.app, 'package_create',
                              status=409,
                              name='test-name-1',
                              title="test-title-1",
                              content_type="test1,test2",
                              license_id="other",
                              notes="test notes",
                              tag_string="tag1,tag2",
                              apikey=self.sysadmin.apikey)

        tests.call_action_api(self.app, 'package_create', status=200, name='test-name-2',
                              title="test-title-2", content_type="test1,test2",
                              license_id="other", notes="test notes", tag_string="tag1,tag2",
                              collection_type="Open Data", apikey=self.sysadmin.apikey)

        test_dataset = Package.get('test-name-2')
        self.assert_equal(test_dataset.maintainer, "")
        self.assert_equal(test_dataset.maintainer_email, "")

        if not asbool(config.get('ckanext.ytp.auto_author', False)):
            self.assert_equal(test_dataset.author, "")
            self.assert_equal(test_dataset.author_email, "")
Example #16
def get_package_groups_by_type(package_id, group_type):
    context = {'model': model, 'session': model.Session,
               'for_view': True, 'use_cache': False}

    group_list = []

    data_dict = {
        'all_fields': True,
        'include_extras': True,
        'type': group_type
    }

    groups = logic.get_action('group_list')(context, data_dict)

    try:
        pkg_obj = Package.get(package_id)
        pkg_group_ids = set(group['id'] for group in group_list_dictize(pkg_obj.get_groups(group_type, None), context))
        group_list = [group
                      for group in groups if
                      group['id'] in pkg_group_ids]
    except (NotFound):
        abort(404, _('Dataset not found'))

    return group_list
Example #17
    def test_mapping(self):

        # multilang requires lang to be set
        from pylons.i18n.translation import set_lang, get_lang
        import pylons
        class dummyreq(object):
            class p(object):
                translator = object()
            environ = {'pylons.pylons': p()}
        pylons.request = dummyreq()
        pylons.translator.pylons_lang = ['en_GB']
        set_lang('en_GB')
        assert get_lang() == ['en_GB']

        assert 'dcatapit_theme_group_mapper' in config['ckan.plugins'], "No dcatapit_theme_group_mapper plugin in config"
        contents = self._get_file_contents('dataset.rdf')

        p = RDFParser(profiles=['it_dcat_ap'])

        p.parse(contents)
        datasets = [d for d in p.datasets()]
        eq_(len(datasets), 1)
        package_dict = datasets[0]


        user = User.get('dummy')
        
        if not user:
            user = call_action('user_create',
                               name='dummy',
                               password='******',
                               email='*****@*****.**')
            user_name = user['name']
        else:
            user_name = user.name
        org = Group.by_name('dummy')
        if org is None:
            org  = call_action('organization_create',
                                context={'user': user_name},
                                name='dummy',
                                identifier='aaaaaa')
        existing_g = Group.by_name('existing-group')
        if existing_g is None:
            existing_g  = call_action('group_create',
                                      context={'user': user_name},
                                      name='existing-group')

        context = {'user': '******',
                   'ignore_auth': True,
                   'defer_commit': False}
        package_schema = schema.default_create_package_schema()
        context['schema'] = package_schema
        _p = {'frequency': 'manual',
              'publisher_name': 'dummy',
              'extras': [{'key':'theme', 'value':['non-mappable', 'thememap1']}],
              'groups': [],
              'title': 'dummy',
              'holder_name': 'dummy',
              'holder_identifier': 'dummy',
              'name': 'dummy',
              'notes': 'dummy',
              'owner_org': 'dummy',
              'modified': datetime.now(),
              'publisher_identifier': 'dummy',
              'metadata_created' : datetime.now(),
              'metadata_modified': datetime.now(),
              'guid': unicode(uuid.uuid4()),
              'identifier': 'dummy'}
        
        package_dict.update(_p)
        config[DCATAPIT_THEME_TO_MAPPING_SOURCE] = ''
        package_data = call_action('package_create', context=context, **package_dict)

        p = Package.get(package_data['id'])

        # no groups should be assigned at this point (no map applied)
        assert {'theme': ['non-mappable', 'thememap1']} == p.extras, '{} vs {}'.format(_p['extras'], p.extras)
        assert [] == p.get_groups(group_type='group'), 'should be {}, got {}'.format([], p.get_groups(group_type='group'))

        package_data = call_action('package_show', context=context, id=package_data['id'])

        # use the test mapping, which maps thememap1 to thememap2 and thememap3
        test_map_file = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'examples', 'test_map.ini')
        config[DCATAPIT_THEME_TO_MAPPING_SOURCE] = test_map_file

        package_dict['theme'] = ['non-mappable', 'thememap1']

        expected_groups_existing = ['existing-group']
        expected_groups_new = expected_groups_existing + ['somegroup1', 'somegroup2']
        expected_groups_multi = expected_groups_new + ['othergroup']

        package_dict.pop('extras', None)
        p = Package.get(package_data['id'])
        context['package'] = p 

        package_data = call_action('package_update',
                                   context=context,
                                   **package_dict)
        
        #meta.Session.flush()
        #meta.Session.revision = repo.new_revision()

        # check - only existing group should be assigned
        p = Package.get(package_data['id'])
        groups = [g.name for g in p.get_groups(group_type='group')]

        assert expected_groups_existing == groups, (expected_groups_existing, 'vs', groups,)

        config[DCATAPIT_THEME_TO_MAPPING_ADD_NEW_GROUPS] = 'true'


        package_dict['theme'] = ['non-mappable', 'thememap1']
        package_data = call_action('package_update', context=context, **package_dict)


        meta.Session.flush()
        meta.Session.revision = repo.new_revision()

        # recheck - this time, new groups should appear
        p = Package.get(package_data['id'])
        groups = [g.name for g in p.get_groups(group_type='group')]

        assert len(expected_groups_new) == len(groups), (expected_groups_new, 'vs', groups,)
        assert set(expected_groups_new) == set(groups), (expected_groups_new, 'vs', groups,)

        package_dict['theme'] = ['non-mappable', 'thememap1', 'thememap-multi']
        package_data = call_action('package_update', context=context, **package_dict)

        meta.Session.flush()
        meta.Session.revision = repo.new_revision()

        # recheck - there should be no duplicates
        p = Package.get(package_data['id'])
        groups = [g.name for g in p.get_groups(group_type='group')]

        assert len(expected_groups_multi) == len(groups), (expected_groups_multi, 'vs', groups,)
        assert set(expected_groups_multi) == set(groups), (expected_groups_multi, 'vs', groups,)

        package_data = call_action('package_update', context=context, **package_dict)

        meta.Session.flush()
        meta.Session.revision = repo.new_revision()

        # recheck - there still should be no duplicates
        p = Package.get(package_data['id'])
        groups = [g.name for g in p.get_groups(group_type='group')]

        assert len(expected_groups_multi) == len(groups), (expected_groups_multi, 'vs', groups,)
        assert set(expected_groups_multi) == set(groups), (expected_groups_multi, 'vs', groups,)

        meta.Session.rollback()
Example #18
    def dataset_collection_list(self, id):
        '''
        Display a list of collections a dataset is associated with, with an
        option to add to collection from a list.
        '''
        context = {
            'model': model,
            'session': model.Session,
            'user': c.user,
            'for_view': True,
            'auth_user_obj': c.userobj,
            'use_cache': False
        }
        data_dict = {
            'id': id,
            'type': 'collection',
            'all_fields': True,
            'include_extras': True
        }

        c.collection_list = []
        try:
            c.pkg_dict = get_action('package_show')(context, data_dict)
            pkg_obj = Package.get(data_dict['id'])
            c.collection_list = group_list_dictize(
                pkg_obj.get_groups('collection', None), context)
            dataset_type = c.pkg_dict['type'] or 'dataset'
        except (NotFound, NotAuthorized):
            abort(404, _('Dataset not found'))

        if request.method == 'POST':
            # Adding package to collection
            new_collection = request.POST.get('collection_added')
            if new_collection:
                data_dict = {
                    "id": new_collection,
                    "object": id,
                    "object_type": 'package',
                    "capacity": 'public'
                }
                try:
                    get_action('member_create')(context, data_dict)
                except NotFound:
                    abort(404, _('Collection not found'))

            removed_group = None
            for param in request.POST:
                if param.startswith('collection_remove'):
                    removed_group = param.split('.')[-1]
                    break
            if removed_group:
                data_dict = {
                    "id": removed_group,
                    "object": id,
                    "object_type": 'package'
                }

                try:
                    get_action('member_delete')(context, data_dict)
                except NotFound:
                    abort(404, _('Collection not found'))
            h.redirect_to(
                controller='ckanext.collection.controller:CollectionController',
                action='dataset_collection_list',
                id=id)

        context['am_member'] = True

        # Every collection will get listed here instead of using group_list_authz as implemented in CKAN core groups,
        # since group_list_authz does not support group type
        collections = get_action('group_list')(context, data_dict)

        pkg_group_ids = set(group['id'] for group in c.collection_list)

        context['am_member'] = True
        users_collections = get_action('group_list_authz')(context, data_dict)
        user_collection_ids = set(group['id'] for group in users_collections)

        cols = [
            collection for collection in collections
            if collection['id'] in user_collection_ids
        ]

        c.collection_list = [
            collection for collection in collections
            if collection['id'] in pkg_group_ids
        ]

        c.collection_dropdown = [[group['id'], group] for group in cols
                                 if group['id'] not in pkg_group_ids]

        for collection in c.collection_list:
            collection['user_member'] = (collection['id']
                                         in user_collection_ids)

        return render('package/collection_list.html',
                      {'dataset_type': dataset_type})
Example #19
def get_package_object(package_dict):
    """Return an instance of ckan.model.package.Package for
       `package_dict` or None if there isn't one."""

    return Package.get(package_dict.get('name'))
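As the docstring says, this is a thin wrapper around Package.get(), which accepts a package name or id and returns None when nothing matches. A hypothetical caller (the dataset name below is made up) would typically guard against the None case:

pkg = get_package_object({'name': 'some-dataset'})  # 'some-dataset' is illustrative
if pkg is None:
    # no package with that name or id exists
    ...
else:
    print(pkg.title, pkg.state)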
Example #20
 def resolve_child(self, info, id):
     # NOTE: 'sub_pkgs' is not defined in this snippet; it is presumably the
     # mapping of related packages built elsewhere (see resolve_package in
     # Example #13).
     return [
         Package(id=pkg_dict['id'], title=pkg_dict['title'])
         for pkg_dict in sub_pkgs.values()
     ]
Example #21
 def before_view(self, pkg_dict):
     if pkg_dict['type'] == 'dataset':
         package = Package.get(pkg_dict['id'])
         pkg_dict['ratings'] = package.get_average_rating()
     return pkg_dict
Example #22
def package_show(context, data_dict):
    package_dict = get.package_show(context, data_dict)
    package = Package.get(package_dict['id'])
    package_dict['rating'] = package.get_average_rating()
    return package_dict
Example #23
    def test_theme_to_group_mapping(self):
        # multilang requires lang to be set
        # class dummyreq(object):
        #     class p(object):
        #         translator = object()
        #     environ = {'pylons.pylons': p()}

        # CKANRequest(dummyreq)
        # pylons.request = dummyreq()
        # pylons.translator.pylons_lang = ['en_GB']

        #set_lang('en_GB')
        #assert get_lang() == ['en_GB']
        assert 'dcatapit_theme_group_mapper' in config[
            'ckan.plugins'], 'No dcatapit_theme_group_mapper plugin in config'

        with open(get_example_file('dataset.rdf'), 'r') as f:
            contents = f.read()

        p = RDFParser(profiles=['it_dcat_ap'])

        p.parse(contents)
        datasets = [d for d in p.datasets()]
        self.assertEqual(len(datasets), 1)
        package_dict = datasets[0]

        user = User.get('dummy')

        if not user:
            user = call_action('user_create',
                               name='dummy',
                               password='******',
                               email='*****@*****.**')
            user_name = user['name']
        else:
            user_name = user.name
        org = Group.by_name('dummy')
        if org is None:
            org = call_action('organization_create',
                              context={'user': user_name},
                              name='dummy',
                              identifier='aaaaaa')
        existing_g = Group.by_name('existing-group')
        if existing_g is None:
            existing_g = call_action('group_create',
                                     context={'user': user_name},
                                     name='existing-group')

        context = {'user': '******', 'ignore_auth': True, 'defer_commit': False}
        package_schema = schema.default_create_package_schema()
        context['schema'] = package_schema
        _p = {
            'frequency': 'manual',
            'publisher_name': 'dummy',
            'extras': [{
                'key': 'theme',
                'value': ['non-mappable', 'thememap1']
            }],
            'groups': [],  #  [{'name':existing_g.name}],
            'title': 'dummy',
            'holder_name': 'dummy',
            'holder_identifier': 'dummy',
            'name': 'dummy-' + uuid4().hex,
            'identifier': 'dummy' + uuid4().hex,
            'notes': 'dummy',
            'owner_org': 'dummy',
            'modified': datetime.now(),
            'publisher_identifier': 'dummy',
            'metadata_created': datetime.now(),
            'metadata_modified': datetime.now(),
            'guid': str(uuid.uuid4()),
        }

        package_dict.update(_p)

        config[DCATAPIT_THEME_TO_MAPPING_SOURCE] = ''
        config[DCATAPIT_THEME_TO_MAPPING_ADD_NEW_GROUPS] = 'false'

        package_data = call_action('package_create',
                                   context=context,
                                   **package_dict)

        p = Package.get(package_data['id'])

        # no groups should be assigned at this point (no map applied)
        assert {
            'theme': ['non-mappable', 'thememap1']
        } == p.extras, '{} vs {}'.format(_p['extras'], p.extras)
        assert [] == p.get_groups(
            group_type='group'), 'should be {}, got {}'.format(
                [], p.get_groups(group_type='group'))

        package_data = call_action('package_show',
                                   context=context,
                                   id=package_data['id'])

        # use the test mapping, which maps thememap1 to thememap2 and thememap3
        test_map_file = os.path.join(os.path.dirname(__file__), '..', '..',
                                     '..', 'examples', 'test_map.ini')

        config[DCATAPIT_THEME_TO_MAPPING_SOURCE] = test_map_file
        config[DCATAPIT_THEME_TO_MAPPING_ADD_NEW_GROUPS] = 'false'

        # package_dict['theme'] = ['non-mappable', 'thememap1']

        package_dict.pop('extras', None)
        p = Package.get(package_data['id'])
        context['package'] = p

        package_data = call_action('package_update',
                                   context=context,
                                   **package_dict)

        # check - only existing group should be assigned
        p = Package.get(package_data['id'])
        groups = [g.name for g in p.get_groups(group_type='group')]

        # the map file maps ECON to the existing group and two other nonexistent groups that will not be created
        expected_groups = ['existing-group']
        self.assertSetEqual(set(expected_groups), set(groups),
                            'Error in assigned groups')

        config[DCATAPIT_THEME_TO_MAPPING_SOURCE] = test_map_file
        config[DCATAPIT_THEME_TO_MAPPING_ADD_NEW_GROUPS] = 'true'

        # package_dict['theme'] = ['non-mappable', 'thememap1']
        package_data = call_action('package_update',
                                   context=context,
                                   **package_dict)

        meta.Session.flush()

        # recheck - this time, new groups should appear
        p = Package.get(package_data['id'])
        groups = [g.name for g in p.get_groups(group_type='group')]

        # the map file maps ECON to the existing group and two other groups that have been created automatically
        expected_groups = expected_groups + ['somegroup1', 'somegroup2']
        self.assertSetEqual(set(expected_groups), set(groups), 'Groups differ')

        # package_dict['theme'] = ['non-mappable', 'thememap1', 'thememap-multi']
        aggr = json.loads(package_dict[FIELD_THEMES_AGGREGATE])
        aggr.append({'theme': 'thememap-multi', 'subthemes': []})
        package_dict[FIELD_THEMES_AGGREGATE] = json.dumps(aggr)

        package_data = call_action('package_update',
                                   context=context,
                                   **package_dict)

        meta.Session.flush()

        # recheck - there should be no duplicates
        p = Package.get(package_data['id'])
        groups = [g.name for g in p.get_groups(group_type='group')]

        # added theme 'thememap-multi', which maps to 'othergroup' and other already existing groups
        expected_groups = expected_groups + ['othergroup']
        self.assertEqual(len(expected_groups), len(groups),
                         'New groups differ - there may be duplicated groups')
        self.assertSetEqual(set(expected_groups), set(groups),
                            'New groups differ')

        package_data = call_action('package_update',
                                   context=context,
                                   **package_dict)

        meta.Session.flush()

        # recheck - there still should be no duplicates
        p = Package.get(package_data['id'])
        groups = [g.name for g in p.get_groups(group_type='group')]

        self.assertEqual(len(expected_groups), len(groups),
                         'New groups differ - there may be duplicated groups')
        self.assertSetEqual(set(expected_groups), set(groups),
                            'New groups differ')

        meta.Session.rollback()