def setup(self):
    """Create a harvested UKLP dataset fixture for this suite."""
    _drop_sysadmin()
    self.admin = _create_sysadmin()
    CreateTestData.create_test_user()
    self.tester = "tester"
    CreateTestData.create_groups(
        _EXAMPLE_GROUPS, admin_user_name=self.tester, auth_profile="publisher"
    )
    CreateTestData.flag_for_deletion(
        group_names=[group["name"] for group in _EXAMPLE_GROUPS]
    )
    # minimal action context for the legacy REST create action
    context = {
        "model": ckan.model,
        "session": ckan.model.Session,
        "user": self.admin,
        "api_version": 2,
        "schema": ckan.logic.schema.default_package_schema(),
    }
    self.uklp_dataset = get_action("package_create_rest")(
        context, _UKLP_DATASET.copy()
    )
    CreateTestData.flag_for_deletion(pkg_names=[self.uklp_dataset["name"]])
def setup(self):
    """Set up a harvested UKLP dataset used by the tests below."""
    _drop_sysadmin()
    self.admin = _create_sysadmin()
    CreateTestData.create_test_user()
    self.tester = 'tester'
    CreateTestData.create_groups(_EXAMPLE_GROUPS,
                                 admin_user_name=self.tester,
                                 auth_profile='publisher')
    group_names = [g['name'] for g in _EXAMPLE_GROUPS]
    CreateTestData.flag_for_deletion(group_names=group_names)
    context = {
        'model': ckan.model,
        'session': ckan.model.Session,
        'user': self.admin,
        'api_version': 2,
        'schema': ckan.logic.schema.default_package_schema(),
    }
    package_dict = _UKLP_DATASET.copy()
    self.uklp_dataset = get_action('package_create_rest')(context, package_dict)
    CreateTestData.flag_for_deletion(pkg_names=[self.uklp_dataset['name']])
def test_0_reload(self):
    # Reload behaviour: packages are matched on the 'ref' extra.  Renames
    # are not supported, and a same-name/different-ref load creates a new
    # package instead of overwriting.
    # create initial package
    num_pkgs = count_pkgs()
    pkg_dict = {'name':u'pkgname0', 'title':u'Boris', 'extras':{u'ref':'boris'}}
    assert not model.Package.by_name(pkg_dict['name'])
    CreateTestData.create_arbitrary([pkg_dict])
    self.tsi.index()
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # load the package with same name and ref
    pkg_dict = {'name':u'pkgname0', 'title':u'Boris 2', 'extras':{u'ref':'boris'}}
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # load the package with different name, same ref
    pkg_dict = {'name':u'pkgname0changed', 'title':u'Boris 3', 'extras':{u'ref':'boris'}}
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # for now we do not support renaming
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg is None, pkg
    pkg = model.Package.by_name(u'pkgname0')
    assert pkg
    assert pkg.title == pkg_dict['title']
    # load the package with same name, different ref - new package
    other_pkg_dict = pkg_dict
    pkg_dict = {'name':u'pkgname0', 'title':u'Boris 4', 'extras':{u'ref':'boris-4'}}
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    # NOTE(review): this implies load_package mutates pkg_dict['name'] in
    # place when resolving the clash - confirm against the loader code
    assert pkg_dict['name'] == 'pkgname0_'
    orig_pkg = model.Package.by_name(u'pkgname0')
    assert orig_pkg
    assert orig_pkg.title == u'Boris 3'
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
def test_7_validate(self):
    # Form validation: each bad field value must produce its expected
    # error message, and a form with errors must still sync (for preview).
    # bad dates must be picked up in validation
    indict = _get_blank_param_dict(fs=get_fieldset())
    prefix = 'Package--'
    pkg_name = u'test_name7'
    indict[prefix + 'name'] = pkg_name
    indict[prefix + 'title'] = u'Test'
    indict[prefix + 'published_by'] = u'National Health Service [1]'
    indict[prefix + 'notes'] = u'abcd'
    indict[prefix + 'license_id'] = u'abcde'
    indict[prefix + 'date_released'] = u'27/11/2008'
    fs = get_fieldset().bind(model.Package, data=indict, session=model.Session)
    # initially validates ok
    assert fs.validate(), fs.errors
    # now add all problems: (field, bad value, expected error fragment)
    bad_validating_data = [
        ('date_released', u'27/11/0208', 'out of range'),
        ('published_by', u'', 'Please enter a value'),
        ('published_via', u'Unheard of publisher', 'not one of the options'),
        ('national_statistic', u'yes',
         "'National Statistic' should only be checked if the package is "
         "'published by' or 'published via' the Office for National "
         "Statistics."),
    ]
    for field_name, bad_data, error_txt in bad_validating_data:
        indict[prefix + field_name] = bad_data
    fs = get_fieldset().bind(model.Package, data=indict, session=model.Session)
    # validation fails
    assert not fs.validate()
    # each bad field must carry its expected error text
    for field_name, bad_data, error_txt in bad_validating_data:
        field = getattr(fs, field_name)
        err = fs.errors[field]
        assert error_txt in str(err), '%r should be in error %r' % (error_txt, err)
    # make sure it syncs without exception (this is req'd for a preview)
    CreateTestData.flag_for_deletion(pkg_name)
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    # now fix publisher for national_statistics validation to pass
    indict[prefix + 'published_via'] = 'Office for National Statistics [345]'
    fs = get_fieldset().bind(model.Package, data=indict)
    fs.validate()
    error_field_names = [field.name for field in fs.errors.keys()]
    assert 'national_statistic' not in error_field_names, fs.errors
def test_submit_package_create_form_valid(self):
    """Posting a valid create form redirects to the package and creates it."""
    package_name = u'new_name'
    CreateTestData.flag_for_deletion(package_name)
    assert not self.get_package_by_name(package_name)
    form, ret_status = self.get_package_create_form(package_form=self.form)
    res = self.post_package_create_form(
        form=form,
        package_form=self.form,
        name=package_name,
        published_by='National Health Service [1]',
        published_via='Department of Energy and Climate Change [4]',
        license_id='gfdl',
        notes='def',
        title='efg',
    )
    self.assert_header(res, 'Location')
    # the response body should be empty (or JSON-empty)
    assert (not res.body) or (not json.loads(res.body))
    expected_location = 'http://localhost' + self.package_offset(package_name)
    self.assert_header(res, 'Location', expected_location)
    pkg = self.get_package_by_name(package_name)
    assert pkg
def setup(self):
    """Create the standard gov3 fixtures once; this suite never edits packages."""
    CreateTestData.create_groups(_EXAMPLE_GROUPS, auth_profile='publisher')
    group_names = [group['name'] for group in _EXAMPLE_GROUPS]
    CreateTestData.flag_for_deletion(group_names=group_names)
    self.fixtures = Gov3Fixtures()
    self.fixtures.create()
    self.admin = _create_sysadmin()
def setup(self):
    """Build the shared gov3 fixtures; safe to do once as tests are read-only."""
    CreateTestData.create_groups(_EXAMPLE_GROUPS, auth_profile="publisher")
    names = [group["name"] for group in _EXAMPLE_GROUPS]
    CreateTestData.flag_for_deletion(group_names=names)
    self.fixtures = Gov3Fixtures()
    self.fixtures.create()
    self.admin = _create_sysadmin()
def test_0_simple_load(self):
    """Loading a minimal package dict creates a matching package."""
    pkg_dict = {'name': u'pkgname', 'title': u'Boris'}
    # precondition: the package must not exist yet
    assert not model.Package.by_name(pkg_dict['name'])
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    loaded_dict = self.loader.load_package(pkg_dict)
    assert loaded_dict
    pkg = model.Package.by_name(pkg_dict['name'])
    # the returned dict must agree with what was stored
    assert_equal_dicts(loaded_dict, pkg.as_dict(),
                       only_assert_these_keys=('name', 'title'))
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
def test_0_simple_load(self):
    """The loader turns a simple dict into a stored package."""
    pkg_dict = {'name': u'pkgname', 'title': u'Boris'}
    assert not model.Package.by_name(pkg_dict['name'])
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    res_pkg_dict = self.loader.load_package(pkg_dict)
    # the loader must return a non-empty package dict
    assert res_pkg_dict
    pkg = model.Package.by_name(pkg_dict['name'])
    assert_equal_dicts(res_pkg_dict,
                       pkg.as_dict(),
                       only_assert_these_keys=('name', 'title'))
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
def test_7_validate(self):
    # Form validation: each bad value must yield its expected error
    # message, and an invalid form must still sync (needed for previews).
    # bad dates must be picked up in validation
    indict = _get_blank_param_dict(fs=get_fieldset())
    prefix = "Package--"
    pkg_name = u"test_name7"
    indict[prefix + "name"] = pkg_name
    indict[prefix + "title"] = u"Test"
    indict[prefix + "published_by"] = u"National Health Service [1]"
    indict[prefix + "notes"] = u"abcd"
    indict[prefix + "license_id"] = u"abcde"
    indict[prefix + "date_released"] = u"27/11/2008"
    fs = get_fieldset().bind(model.Package, data=indict, session=model.Session)
    # initially validates ok
    assert fs.validate(), fs.errors
    # now add all problems: (field, bad value, expected error fragment)
    bad_validating_data = [
        ("date_released", u"27/11/0208", "out of range"),
        ("published_by", u"", "Please enter a value"),
        ("published_via", u"Unheard of publisher", "not one of the options"),
        (
            "national_statistic",
            u"yes",
            "'National Statistic' should only be checked if the package is "
            "'published by' or 'published via' the Office for National "
            "Statistics.",
        ),
    ]
    for field_name, bad_data, error_txt in bad_validating_data:
        indict[prefix + field_name] = bad_data
    fs = get_fieldset().bind(model.Package, data=indict, session=model.Session)
    # validation fails
    assert not fs.validate()
    # each bad field must carry its expected error text
    for field_name, bad_data, error_txt in bad_validating_data:
        field = getattr(fs, field_name)
        err = fs.errors[field]
        assert error_txt in str(err), "%r should be in error %r" % (error_txt, err)
    # make sure it syncs without exception (this is req'd for a preview)
    CreateTestData.flag_for_deletion(pkg_name)
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    # now fix publisher for national_statistics validation to pass
    indict[prefix + "published_via"] = "Office for National Statistics [345]"
    fs = get_fieldset().bind(model.Package, data=indict)
    fs.validate()
    error_field_names = [field.name for field in fs.errors.keys()]
    assert "national_statistic" not in error_field_names, fs.errors
def test_6_sync_update_restrict(self):
    # With the restricted fieldset, syncing must not rename the package
    # nor change the 'department'/'national_statistic' extras; only the
    # editable fields (notes) are updated.
    # create initial package
    pkg_name = u'test_sync_restrict'
    init_data = [{
        'name': pkg_name,
        'title': 'test_title',
        'extras': {
            'notes': 'Original notes',
            'national_statistic': 'yes',
            'department': 'dosac',
        },
    }]
    CreateTestData.create_arbitrary(init_data)
    pkg = model.Package.by_name(pkg_name)
    assert pkg
    # edit it with form parameters
    indict = _get_blank_param_dict(pkg=pkg, fs=get_fieldset(restrict=1))
    prefix = 'Package-%s-' % pkg.id
    indict[prefix + 'notes'] = u'some new notes'
    # try changing restricted params anyway
    new_name = u'testname4'
    indict[prefix + 'name'] = new_name
    indict[prefix + 'department'] = u'testdept'
    # don't supply national_statistic param at all
    fs = get_fieldset(restrict=1).bind(pkg, data=indict)
    CreateTestData.flag_for_deletion(new_name)
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    assert not model.Package.by_name(new_name)  # unchanged
    outpkg = model.Package.by_name(pkg_name)  # unchanged
    assert outpkg
    # test sync worked
    assert outpkg.notes == indict[prefix + 'notes']
    # test gov fields: the restricted extras must keep their old values
    extra_keys = outpkg.extras.keys()
    reqd_extras = {
        'national_statistic': 'yes',  # unchanged
        'department': init_data[0]['extras']['department'],  # unchanged
    }
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, 'Key "%s" not found in extras %r' % (reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, \
            'Extra %s should equal %s but equals %s' % \
            (reqd_extra_key, reqd_extra_value, outpkg.extras[reqd_extra_key])
def test_1_avoid_long_name_clash(self):
    # Name clashes are resolved by appending '_'; when the name would
    # exceed the 100-char limit it is shortened so the suffix still fits.
    # load the package once
    num_pkgs = count_pkgs()
    pkg_dict = {
        'name': u'a' * 99,
        'title': u'99 char name',
        'extras': {
            u'ref': 'aaa'
        }
    }
    assert not model.Package.by_name(pkg_dict['name'])
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # load a clashing package - name appended '_'
    orig_pkg = pkg_dict
    pkg_dict = {
        'name': orig_pkg['name'],
        'title': u'bbb',
        'extras': {
            u'ref': 'bbb'
        }
    }
    self.loader.load_package(pkg_dict)
    clash_name = u'a' * 99 + u'_'
    pkg = model.Package.by_name(clash_name)
    assert pkg
    assert pkg.title == pkg_dict['title'], pkg.title
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    # load another clashing package - name over 100 chars so shortened
    # and finishes '__'
    orig_pkg = pkg_dict
    pkg_dict = {
        'name': orig_pkg['name'],
        'title': u'ccc',
        'extras': {
            u'ref': 'ccc'
        }
    }
    self.loader.load_package(pkg_dict)
    clash_name = u'a' * 98 + u'__'
    # NOTE(review): this implies load_package mutates pkg_dict['name'] in
    # place when renaming - confirm against the loader implementation
    assert pkg_dict['name'] == clash_name, (pkg_dict['name'], clash_name)
    pkg = model.Package.by_name(clash_name)
    assert pkg
    assert pkg.title == pkg_dict['title'], pkg.title
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
def test_1_load_several_with_errors(self):
    """If every dict in the batch is invalid, nothing is loaded."""
    num_pkgs = count_pkgs()
    pkg_dicts = [
        {'name': u'pkgnameA',  # not allowed uppercase name
         'title': u'BorisA'},
        {'name': u'pkgnameB', 'title': u'BorisB'},
    ]
    assert not model.Package.by_name(pkg_dicts[0]['name'])
    CreateTestData.flag_for_deletion(
        pkg_names=[d['name'] for d in pkg_dicts])
    res = self.loader.load_packages(pkg_dicts)
    outcome = (res['num_loaded'], res['num_errors'])
    assert outcome == (0, 2), outcome
    # no packages were created, no ids returned
    assert count_pkgs() == num_pkgs, (count_pkgs() - num_pkgs)
    assert res['pkg_ids'] == [], res['pkg_ids']
def test_6_sync_update_restrict(self):
    # With the restricted fieldset the 'national_statistic' extra must
    # keep its original value even when a new value is submitted; the
    # rename, by contrast, is applied (asserted below).
    # create initial package
    pkg_name = u'test_sync_restrict'
    init_data = [{
        'name': pkg_name,
        'title': 'test_title',
        'extras': {
            'notes': 'Original notes',
            'national_statistic': 'yes',
        },
    }]
    CreateTestData.create_arbitrary(init_data)
    pkg = model.Package.by_name(pkg_name)
    assert pkg
    # edit it with form parameters
    indict = _get_blank_param_dict(pkg=pkg, fs=get_fieldset(restrict=1))
    prefix = 'Package-%s-' % pkg.id
    indict[prefix + 'notes'] = u'some new notes'
    # try changing restricted params anyway
    new_name = u'testname3'
    indict[prefix + 'name'] = new_name
    indict[prefix + 'national_statistic'] = u'no'
    # NOTE(review): the submitted national_statistic value is expected to
    # be discarded by the restricted fieldset (asserted below)
    fs = get_fieldset(restrict=1).bind(pkg, data=indict)
    CreateTestData.flag_for_deletion(new_name)
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    # the rename did happen - only the restricted extras are protected
    assert not model.Package.by_name(pkg_name)
    outpkg = model.Package.by_name(new_name)
    assert outpkg
    # test sync worked
    assert outpkg.notes == indict[prefix + 'notes']
    # test gov fields
    extra_keys = outpkg.extras.keys()
    reqd_extras = {
        'national_statistic': 'yes',  # unchanged
    }
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, 'Key "%s" not found in extras %r' % (
            reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, \
            'Extra %s should equal %s but equals %s' % \
            (reqd_extra_key, reqd_extra_value, outpkg.extras[reqd_extra_key])
def test_submitting_a_valid_create_form_creates_a_new_package(self):
    """Posting the example individual-dataset form creates the package."""
    # setup fixture
    package_data = _EXAMPLE_INDIVIDUAL_DATA
    package_name = package_data["name"]
    CreateTestData.flag_for_deletion(package_name)
    existing = self.get_package_by_name(package_name)
    assert not existing, 'Package "%s" already exists' % package_name
    # create package via form
    self._form_client.post_form(package_data)
    # ensure it's correct
    pkg = self.get_package_by_name(package_name)
    assert pkg
    assert package_data["name"] == pkg.name
def test_submitting_a_valid_create_form_creates_a_new_package(self):
    """A valid create-form POST results in the package existing."""
    # setup fixture
    package_data = _EXAMPLE_INDIVIDUAL_DATA
    package_name = package_data['name']
    CreateTestData.flag_for_deletion(package_name)
    assert not self.get_package_by_name(package_name), (
        'Package "%s" already exists' % package_name)
    # create package via the form client, then verify it was stored
    self._form_client.post_form(package_data)
    pkg = self.get_package_by_name(package_name)
    assert pkg
    assert package_data['name'] == pkg.name
def test_6_sync_update_restrict(self):
    # Restricted fieldset: a submitted 'national_statistic' change must be
    # ignored on sync, while the rename is applied (asserted below).
    # create initial package
    pkg_name = u"test_sync_restrict"
    init_data = [
        {
            "name": pkg_name,
            "title": "test_title",
            "extras": {"notes": "Original notes", "national_statistic": "yes"},
        }
    ]
    CreateTestData.create_arbitrary(init_data)
    pkg = model.Package.by_name(pkg_name)
    assert pkg
    # edit it with form parameters
    indict = _get_blank_param_dict(pkg=pkg, fs=get_fieldset(restrict=1))
    prefix = "Package-%s-" % pkg.id
    indict[prefix + "notes"] = u"some new notes"
    # try changing restricted params anyway
    new_name = u"testname3"
    indict[prefix + "name"] = new_name
    indict[prefix + "national_statistic"] = u"no"
    # NOTE(review): the national_statistic value supplied above should be
    # discarded by the restricted fieldset (asserted below)
    fs = get_fieldset(restrict=1).bind(pkg, data=indict)
    CreateTestData.flag_for_deletion(new_name)
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    # the rename did happen - only restricted extras are protected
    assert not model.Package.by_name(pkg_name)
    outpkg = model.Package.by_name(new_name)
    assert outpkg
    # test sync worked
    assert outpkg.notes == indict[prefix + "notes"]
    # test gov fields
    extra_keys = outpkg.extras.keys()
    reqd_extras = {"national_statistic": "yes"}  # unchanged
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, 'Key "%s" not found in extras %r' % (reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, "Extra %s should equal %s but equals %s" % (
            reqd_extra_key,
            reqd_extra_value,
            outpkg.extras[reqd_extra_key],
        )
def test_create_package(self):
    """The apitest create form renders and a valid submission creates the package.

    Fix: the original line ``create_page.mustcontain('User:'******'Package--name')``
    contained a ``******`` scrubbing artifact, which is a syntax error.
    Reconstructed as a normal multi-argument ``mustcontain`` call checking
    that the page shows the user label and the blank package-name field.
    """
    user = model.User.by_name(u'annafan')
    create_page = self.app.get('/apitest/form/package/create?user_id=62',
                               extra_environ={'Authorization': str(user.apikey)})
    create_page.mustcontain('User:', 'Package--name')
    form = create_page.forms['test']
    test_name = 'test-name'
    form['Package--name'] = test_name
    form['Package--title'] = 'test title'
    form['Package--notes'] = 'test notes'
    form['Package--license_id'] = 'mit-license'
    res = form.submit()
    CreateTestData.flag_for_deletion(test_name)
    # Python 2 `unicode` builtin - this file is Python 2 code
    pkg = model.Package.by_name(unicode(test_name))
    assert pkg
    assert '201 Created' in res, res.body
def test_1_load_several(self):
    """Two valid dicts load as two packages; ids are returned in order."""
    num_pkgs = count_pkgs()
    pkg_dicts = [
        {'name': u'pkgname_a', 'title': u'BorisA'},
        {'name': u'pkgname_b', 'title': u'BorisB'},
    ]
    assert not model.Package.by_name(pkg_dicts[0]['name'])
    CreateTestData.flag_for_deletion(
        pkg_names=[d['name'] for d in pkg_dicts])
    res = self.loader.load_packages(pkg_dicts)
    counts = (res['num_loaded'], res['num_errors'])
    assert counts == (2, 0), counts
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    # each returned id must match the stored package, in input order
    for pkg_index, pkg_dict in enumerate(pkg_dicts):
        pkg = model.Package.by_name(pkg_dict['name'])
        assert pkg.id == res['pkg_ids'][pkg_index], \
            '%s != %s' % (pkg.id, res['pkg_ids'][pkg_index])
def test_2_reload(self):
    """Loading the same name twice updates the package instead of duplicating."""
    num_pkgs = count_pkgs()
    pkg_dict = {'name': u'pkgname2', 'title': u'Boris'}
    assert not model.Package.by_name(pkg_dict['name'])
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # a second load with the same name must update in place
    pkg_dict = {'name': u'pkgname2', 'title': u'Boris Becker'}
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title'], pkg.title
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
def test_a_full_timeseries_dataset_edit_form(self):
    """
    Creates a new package and then checks the edit form is filled
    correctly.
    """
    form_client = _PackageFormClient()
    package_data = _EXAMPLE_TIMESERIES_DATA.copy()
    package_name = package_data["name"]
    CreateTestData.flag_for_deletion(package_name)
    assert not self.get_package_by_name(package_name), 'Package "%s" already exists' % package_name
    # create package via form
    response = form_client.post_form(package_data)
    # GET the edit form
    offset = url_for(controller="package", action="edit", id=package_name)
    response = self.app.get(offset, extra_environ={"REMOTE_USER": self.admin})
    # tags may be re-ordered, so test them manually
    expected_tags = set(map(lambda s: s.strip(), package_data["tag_string"].split(",")))
    # NOTE: .next() is Python 2 iterator syntax; raises StopIteration if
    # the tag_string input is missing from the form markup
    tag_string_form_value = (
        re.finditer(r'<input [^>]*id="tag_string" name="tag_string" [^>]*value="([^"]+)" />', response.body)
        .next()
        .group(1)
    )
    actual_tags = set(map(lambda s: s.strip(), tag_string_form_value.split(",")))
    assert_equal(expected_tags, actual_tags)
    del package_data["tag_string"]
    # Check the notes field separately as it contains a newline character
    # in its value. And the `self.check_named_element()` method doesn't
    # use multi-line regular expressions.
    self.check_named_element(
        response.body.replace("\n", "__newline__"),
        "textarea",
        'name="notes"',
        package_data["notes"].replace("\n", "__newline__"),
    )
    del package_data["notes"]
    # Assert that the rest of the fields appear unaltered in the form
    for field_name, expected_value in package_data.items():
        self.check_named_element(response.body, "(input|textarea|select)", 'name="%s"' % field_name, expected_value)
def test_1_load_several_with_errors(self):
    """When every dict in the batch is rejected, nothing gets loaded."""
    num_pkgs = count_pkgs()
    pkg_dicts = [
        # uppercase names are invalid, so both loads must fail
        {'name': u'pkgnameA', 'title': u'BorisA'},
        {'name': u'pkgnameB', 'title': u'BorisB'},
    ]
    assert not model.Package.by_name(pkg_dicts[0]['name'])
    names = [pkg_dict['name'] for pkg_dict in pkg_dicts]
    CreateTestData.flag_for_deletion(pkg_names=names)
    res = self.loader.load_packages(pkg_dicts)
    outcome = (res['num_loaded'], res['num_errors'])
    assert outcome == (0, 2), outcome
    assert count_pkgs() == num_pkgs, (count_pkgs() - num_pkgs)
    assert res['pkg_ids'] == [], res['pkg_ids']
def test_submit_package_create_form_valid(self):
    """A valid create-form POST responds with a redirect and an empty body."""
    package_name = u'new_name'
    CreateTestData.flag_for_deletion(package_name)
    assert not self.get_package_by_name(package_name)
    form, ret_status = self.get_package_create_form(package_form=self.form)
    field_values = dict(
        name=package_name,
        published_by='National Health Service [1]',
        published_via='Department of Energy and Climate Change [4]',
        license_id='gfdl',
        notes='def',
        title='efg',
    )
    res = self.post_package_create_form(form=form, package_form=self.form,
                                        **field_values)
    self.assert_header(res, 'Location')
    # response body is empty (or JSON-empty)
    assert (not res.body) or (not json.loads(res.body))
    self.assert_header(res, 'Location',
                       'http://localhost' + self.package_offset(package_name))
    pkg = self.get_package_by_name(package_name)
    assert pkg
def test_2_reload(self):
    """Re-loading an existing name updates the title without duplicating."""
    num_pkgs = count_pkgs()
    first_dict = {'name': u'pkgname2', 'title': u'Boris'}
    assert not model.Package.by_name(first_dict['name'])
    CreateTestData.flag_for_deletion(pkg_names=[first_dict['name']])
    self.loader.load_package(first_dict)
    pkg = model.Package.by_name(first_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # second load: same name, new title
    second_dict = {'name': u'pkgname2', 'title': u'Boris Becker'}
    self.loader.load_package(second_dict)
    pkg = model.Package.by_name(second_dict['name'])
    assert pkg
    assert pkg.name == second_dict['name']
    assert pkg.title == second_dict['title'], pkg.title
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
def test_a_full_timeseries_dataset_edit_form(self):
    """
    Creates a new package and then checks the edit form is filled
    correctly.
    """
    form_client = _PackageFormClient()
    package_data = _EXAMPLE_TIMESERIES_DATA.copy()
    package_name = package_data['name']
    CreateTestData.flag_for_deletion(package_name)
    assert not self.get_package_by_name(package_name),\
        'Package "%s" already exists' % package_name
    # create package via form
    response = form_client.post_form(package_data)
    # GET the edit form
    offset = url_for(controller='package', action='edit', id=package_name)
    response = self.app.get(offset, extra_environ={'REMOTE_USER': self.admin})
    # tags may be re-ordered, so test them manually
    expected_tags = set(map(lambda s: s.strip(),
                            package_data['tag_string'].split(',')))
    # NOTE: .next() is Python 2 iterator syntax; raises StopIteration if
    # the tag_string input is missing from the form markup
    tag_string_form_value = re.finditer(
        r'<input [^>]*id="tag_string" name="tag_string" [^>]*value="([^"]+)" />',
        response.body).next().group(1)
    actual_tags = set(map(lambda s: s.strip(),
                          tag_string_form_value.split(',')))
    assert_equal(expected_tags, actual_tags)
    del package_data['tag_string']
    # Check the notes field separately as it contains a newline character
    # in its value. And the `self.check_named_element()` method doesn't
    # use multi-line regular expressions.
    self.check_named_element(response.body.replace('\n', '__newline__'),
                             'textarea',
                             'name="notes"',
                             package_data['notes'].replace('\n', '__newline__'))
    del package_data['notes']
    # Assert that the rest of the fields appear unaltered in the form
    for field_name, expected_value in package_data.items():
        self.check_named_element(response.body,
                                 '(input|textarea|select)',
                                 'name="%s"' % field_name,
                                 expected_value)
def test_1_avoid_long_name_clash(self):
    # Clashing names get '_' appended; once the 100-char name limit is
    # hit, the name is shortened so the extra '_' still fits.
    # load the package once
    num_pkgs = count_pkgs()
    pkg_dict = {'name': u'a'*99, 'title': u'99 char name',
                'extras': {u'ref': 'aaa'}}
    assert not model.Package.by_name(pkg_dict['name'])
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # load a clashing package - name appended '_'
    orig_pkg = pkg_dict
    pkg_dict = {'name': orig_pkg['name'], 'title': u'bbb',
                'extras': {u'ref': 'bbb'}}
    self.loader.load_package(pkg_dict)
    clash_name = u'a'*99 + u'_'
    pkg = model.Package.by_name(clash_name)
    assert pkg
    assert pkg.title == pkg_dict['title'], pkg.title
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    # load another clashing package - name over 100 chars so shortened
    # and finishes '__'
    orig_pkg = pkg_dict
    pkg_dict = {'name': orig_pkg['name'], 'title': u'ccc',
                'extras': {u'ref': 'ccc'}}
    self.loader.load_package(pkg_dict)
    clash_name = u'a'*98 + u'__'
    # NOTE(review): this implies load_package mutates pkg_dict['name'] in
    # place when renaming - confirm against the loader implementation
    assert pkg_dict['name'] == clash_name, (pkg_dict['name'], clash_name)
    pkg = model.Package.by_name(clash_name)
    assert pkg
    assert pkg.title == pkg_dict['title'], pkg.title
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
def test_1_load_several(self):
    """A batch of two valid dicts loads both packages, ids in order."""
    num_pkgs = count_pkgs()
    pkg_dicts = [
        {'name': u'pkgname_a', 'title': u'BorisA'},
        {'name': u'pkgname_b', 'title': u'BorisB'},
    ]
    assert not model.Package.by_name(pkg_dicts[0]['name'])
    CreateTestData.flag_for_deletion(
        pkg_names=[pkg_dict['name'] for pkg_dict in pkg_dicts])
    res = self.loader.load_packages(pkg_dicts)
    totals = (res['num_loaded'], res['num_errors'])
    assert totals == (2, 0), totals
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    # each returned id must correspond to the stored package, in order
    for idx, pkg_dict in enumerate(pkg_dicts):
        pkg = model.Package.by_name(pkg_dict['name'])
        assert pkg.id == res['pkg_ids'][idx], \
            '%s != %s' % (pkg.id, res['pkg_ids'][idx])
def setup(self):
    """Create a form client and the publisher groups the form tests need."""
    self._form_client = _PackageFormClient()
    CreateTestData.create_groups(_EXAMPLE_GROUPS, auth_profile="publisher")
    CreateTestData.flag_for_deletion(
        group_names=[group["name"] for group in _EXAMPLE_GROUPS])
def test_1_reload(self):
    # Reload matching on 'department': a load whose department differs
    # only as a synonym ('sky' for 'air') still updates the existing
    # package, while a genuinely different department ('river') forces a
    # new package with a '_' suffix.
    # NOTE(review): the synonym behaviour is inferred from the assertions
    # below - confirm against the loader's department matching rules.
    # create initial package
    num_pkgs = count_pkgs()
    pkg_dict = {'name': u'pollution', 'title': u'Pollution',
                'extras': {u'department': 'air',
                           u'country': 'UK',  # invariant
                           u'last_updated': 'Monday',  # variant
                           },
                'resources': [{'url': 'pollution.com/1',
                               'description': 'ons/id/1'}],
                }
    bogus_dict = {'name': u'bogus', 'title': u'Pollution',
                  'extras': {u'department': 'water',
                             u'country': 'UK',
                             u'last_updated': 'Monday',
                             },
                  'resources': [{'url': 'pollution.com/2',
                                 'description': 'ons/id/2'}],
                  }
    assert not model.Package.by_name(pkg_dict['name'])
    assert not model.Package.by_name(bogus_dict['name'])
    CreateTestData.create_arbitrary([pkg_dict, bogus_dict])
    self.tsi.index()
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 1, pkg.resources
    # load the similar package: same title, updated resource,
    # BUT synonym department
    pkg_dict = {'name': u'pollution', 'title': u'Pollution',
                'extras': {u'department': 'sky',
                           u'country': 'UK',  # invariant
                           u'last_updated': 'Tuesday',  # variant
                           },
                'resources': [{'url': 'pollution.com/id/1',
                               'description': 'ons/id/1'}],
                }
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert pkg.extras['country'] == pkg_dict['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    # the existing resource was replaced, not appended to
    assert len(pkg.resources) == 1, pkg.resources
    assert pkg.resources[0].url == pkg_dict['resources'][0]['url'], pkg.resources[0].url
    assert pkg.resources[0].description == pkg_dict['resources'][0]['description'], pkg.resources[0]['description']
    # load the different package: because of different department
    pkg_dict3 = {'name': u'pollution', 'title': u'Pollution',
                 'extras': {u'department': 'river',
                            u'country': 'UK',  # invariant
                            u'last_updated': 'Tuesday',  # variant
                            },
                 'resources': [{'url': 'pollution.com/id/3',
                                'description': 'Lots of pollution | ons/id/3'}],
                 }
    self.loader.load_package(pkg_dict3)
    CreateTestData.flag_for_deletion('pollution_')
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
    pkg_names = [pkg.name for pkg in model.Session.query(model.Package).all()]
    pkg = model.Package.by_name(u'pollution_')
    assert pkg
    assert pkg.extras['department'] == pkg_dict3['extras']['department']
def test_0_reload(self):
    # ONS loader reload behaviour: same name+department updates the
    # existing package (merging in new resources); a different department
    # creates a new package named with a '_' suffix; a changed 'country'
    # is tolerated on an otherwise-matching load.
    # create initial package
    num_pkgs = count_pkgs()
    pkg_dict = {'name': u'pollution', 'title': u'Pollution',
                'extras': {u'department': 'air',
                           u'country': 'UK',  # invariant
                           u'last_updated': 'Monday',  # variant
                           },
                'resources': [{'url': 'pollution.com/1',
                               'description': 'ons/id/1'}],
                }
    bogus_dict = {'name': u'bogus', 'title': u'Pollution',
                  'extras': {u'department': 'water',
                             u'country': 'UK',
                             u'last_updated': 'Monday',
                             },
                  'resources': [{'url': 'pollution.com/2',
                                 'description': 'ons/id/2'}],
                  }
    assert not model.Package.by_name(pkg_dict['name'])
    assert not model.Package.by_name(bogus_dict['name'])
    CreateTestData.create_arbitrary([pkg_dict, bogus_dict])
    self.tsi.index()
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 1, pkg.resources
    # load the same package: same title, department, updated resource
    pkg_dict = {'name': u'pollution', 'title': u'Pollution',
                'extras': {u'department': 'air',
                           u'country': 'UK',  # invariant
                           u'last_updated': 'Tuesday',  # variant
                           },
                'resources': [{'url': 'pollution.com/id/1',
                               'description': 'ons/id/1'}],
                }
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert pkg.extras['country'] == pkg_dict['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    # same resource description, so it was updated in place
    assert len(pkg.resources) == 1, pkg.resources
    assert pkg.resources[0].url == pkg_dict['resources'][0]['url'], pkg.resources[0].url
    assert pkg.resources[0].description == pkg_dict['resources'][0]['description'], pkg.resources[0]['description']
    # load the same package: same title, department, new resource
    pkg_dict2 = {'name': u'pollution', 'title': u'Pollution',
                 'extras': {u'department': 'air',
                            u'country': 'UK',  # invariant
                            u'last_updated': 'Tuesday',  # variant
                            },
                 'resources': [{'url': 'pollution.com/id/3',
                                'description': 'ons/id/3'}],
                 }
    self.loader.load_package(pkg_dict2)
    pkg = model.Package.by_name(pkg_dict2['name'])
    assert pkg
    assert pkg.name == pkg_dict2['name']
    assert pkg.title == pkg_dict2['title']
    assert pkg.extras['country'] == pkg_dict2['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict2['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    # a new resource description means the resource is appended
    assert len(pkg.resources) == 2, pkg.resources
    print pkg.resources
    assert_equal(pkg.resources[0].url, pkg_dict['resources'][0]['url'])
    assert pkg.resources[0].description == pkg_dict['resources'][0]['description'], pkg.resources[0]['description']
    assert pkg.resources[1].url == pkg_dict2['resources'][0]['url'], pkg.resources[1].url
    assert pkg.resources[1].description == pkg_dict2['resources'][0]['description'], pkg.resources[1]['description']
    # load the different package: because of different department
    pkg_dict3 = {'name': u'pollution', 'title': u'Pollution',
                 'extras': {u'department': 'river',
                            u'country': 'UK',  # invariant
                            u'last_updated': 'Tuesday',  # variant
                            },
                 'resources': [{'url': 'pollution.com/id/3',
                                'description': 'Lots of pollution | ons/id/3'}],
                 }
    self.loader.load_package(pkg_dict3)
    CreateTestData.flag_for_deletion('pollution_')
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
    pkg_names = [pkg.name for pkg in model.Session.query(model.Package).all()]
    pkg = model.Package.by_name(u'pollution_')
    assert pkg
    assert pkg.extras['department'] == pkg_dict3['extras']['department']
    # load the same package: but with different country
    # should just get a warning
    pkg_dict4 = {'name': u'pollution', 'title': u'Pollution',
                 'extras': OrderedDict([
                     (u'department', 'air'),
                     (u'country', 'UK and France'),  # invariant
                     (u'last_updated', 'Tuesday'),  # variant
                 ]),
                 'resources': [OrderedDict([
                     ('url', 'pollution.com/id/3'),
                     ('description', 'Lots of pollution | ons/id/3'),
                 ])],
                 }
    self.loader.load_package(pkg_dict4)
    pkg = model.Package.by_name(pkg_dict4['name'])
    assert pkg
    assert pkg.name == pkg_dict4['name']
    assert pkg.title == pkg_dict4['title']
    assert pkg.extras['country'] == pkg_dict4['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict4['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
    # matching resource description: resource updated, not appended
    assert len(pkg.resources) == 2, pkg.resources
    assert pkg.resources[0].url == pkg_dict['resources'][0]['url'], pkg.resources[0].url
    assert pkg.resources[0].description == pkg_dict['resources'][0]['description'], pkg.resources[0]['description']
    assert pkg.resources[1].url == pkg_dict4['resources'][0]['url'], pkg.resources[1].url
    assert pkg.resources[1].description == pkg_dict4['resources'][0]['description'], pkg.resources[1]['description']
def test_4_sync_new(self):
    """Sync a brand-new package into the model via the form fieldset.

    Fills a blank form-parameter dict with every field, binds it to
    model.Package, validates, syncs and commits, then checks the created
    package: title/notes, tags (including a newly created tag), license,
    the single resource, and all the gov-form extras (with dates
    converted from form d/m/yyyy to DB ISO format).
    """
    newtagname = 'newtagname'
    indict = _get_blank_param_dict(fs=get_fieldset())
    # 'Package--' is the fieldset key prefix for an as-yet-unsaved package
    prefix = 'Package--'
    indict[prefix + 'name'] = u'testname'
    indict[prefix + 'title'] = u'testtitle'
    indict[prefix + 'notes'] = u'some new notes'
    # NOTE: trailing comma makes this a one-element tuple, as the form expects
    indict[prefix + 'tags'] = u'russian tolstoy, ' + newtagname,
    indict[prefix + 'license_id'] = u'gpl-3.0'
    indict[prefix + 'date_released'] = u'27/11/2008'
    indict[prefix + 'date_updated'] = u'1/4/2008'
    indict[prefix + 'date_update_future'] = u'1/7/2010'
    indict[prefix + 'geographic_granularity'] = u'regional'
    indict[prefix + 'geographic_coverage-england'] = u'True'
    indict[prefix + 'geographic_coverage-wales'] = u'True'
    indict[prefix + 'temporal_granularity'] = u'year'
    indict[prefix + 'temporal_coverage-from'] = u'6/2008'
    indict[prefix + 'temporal_coverage-to'] = u'6/2009'
    indict[prefix + 'national_statistic'] = u'True'
    indict[prefix + 'precision'] = u'Nearest 1000'
    indict[prefix + 'taxonomy_url'] = u'http:/somewhere/about.html'
    indict[prefix + 'published_by'] = 'Ealing PCT [2]'
    indict[prefix + 'published_via'] = 'Department for Education [3]'
    indict[prefix + 'agency'] = u'Quango 1'
    indict[prefix + 'resources-0-url'] = u'http:/1'
    indict[prefix + 'resources-0-format'] = u'xml'
    indict[prefix + 'resources-0-description'] = u'test desc'
    fs = get_fieldset().bind(model.Package, data=indict, session=model.Session)
    CreateTestData.flag_for_deletion(
        pkg_names=[u'testname'],
        tag_names=[u'russian', u'tolstoy'],
    )
    model.repo.new_revision()
    assert fs.validate()
    fs.sync()
    model.repo.commit_and_remove()
    outpkg = model.Package.by_name(u'testname')
    assert outpkg.title == indict[prefix + 'title']
    assert outpkg.notes == indict[prefix + 'notes']
    # test tags
    taglist = [tag.name for tag in outpkg.tags]
    assert u'russian' in taglist, taglist
    assert u'tolstoy' in taglist, taglist
    assert newtagname in taglist
    # test licenses
    assert outpkg.license_id, outpkg
    assert outpkg.license, outpkg
    assert_equal(indict[prefix + 'license_id'], outpkg.license.id)
    # test resources
    assert len(outpkg.resources) == 1, outpkg.resources
    res = outpkg.resources[0]
    assert res.url == u'http:/1', res.url
    assert res.description == u'test desc', res.description
    assert res.format == u'xml', res.format
    # test gov fields: form values should land in extras, with dates in
    # ISO format and geographic coverage encoded as a bit-string
    extra_keys = outpkg.extras.keys()
    reqd_extras = {
        'date_released': '2008-11-27',
        'date_updated': '2008-04-01',
        'date_update_future': u'2010-07-01',
        'geographic_granularity': indict[prefix + 'geographic_granularity'],
        'geographic_coverage': '101000: England, Wales',
        'temporal_granularity': indict[prefix + 'temporal_granularity'],
        'temporal_coverage-from': '2008-06',
        'temporal_coverage-to': '2009-06',
        'national_statistic': 'yes',
        'precision': indict[prefix + 'precision'],
        'taxonomy_url': indict[prefix + 'taxonomy_url'],
        'published_by': indict[prefix + 'published_by'],
        'published_via': indict[prefix + 'published_via'],
    }
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, 'Key "%s" not found in extras %r' % (
            reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, \
            'Extra \'%s\' should equal \'%s\' but equals \'%s\'' % \
            (reqd_extra_key, reqd_extra_value, outpkg.extras[reqd_extra_key])
def test_a_full_timeseries_dataset(self):
    """
    Tests the submission of a fully-completed timeseries dataset.

    Posts _EXAMPLE_TIMESERIES_DATA through the package form client and
    verifies the created package: core fields, publisher group,
    timeseries and additional resources, contact/FOI extras, themes and
    tags, mandate, license and coverage extras.
    """
    package_data = _EXAMPLE_TIMESERIES_DATA
    package_name = package_data["name"]
    CreateTestData.flag_for_deletion(package_name)
    assert not self.get_package_by_name(package_name), 'Package "%s" already exists' % package_name
    # create package via form
    response = self._form_client.post_form(package_data)
    # ensure it's correct
    pkg = self.get_package_by_name(package_name)
    assert pkg, response.body
    assert_equal(package_data["title"], pkg.title)
    assert_equal(package_data["notes"], pkg.notes)
    publisher = pkg.as_dict()["groups"][0]
    assert_equal(package_data["groups__0__name"], publisher)
    # Extra data
    # Timeseries data: every timeseries_resources__N__field form key must
    # round-trip onto the corresponding created resource
    expected_timeseries_keys = filter(lambda k: k.startswith("timeseries_resources"), package_data.keys())
    timeseries_resources = ckanext.dgu.lib.helpers.timeseries_resources(pkg.as_dict())
    assert_equal(len(timeseries_resources), 4)
    for key in expected_timeseries_keys:
        index, field = key.split("__")[1:]
        index = int(index)
        assert_equal(package_data[key], timeseries_resources[index][field])
    # Publisher / contact details
    # The contact-email should not be an extra-field on the dataset as it's the
    # same as the publisher group's contact-email. ie - it hasn't been overridden.
    # The rest of the information should be in the extras fields
    assert_not_in("contact-email", pkg.extras)
    assert_equal(package_data["contact-name"], pkg.extras["contact-name"])
    assert_equal(package_data["contact-phone"], pkg.extras["contact-phone"])
    assert_equal(package_data["foi-name"], pkg.extras["foi-name"])
    assert_equal(package_data["foi-email"], pkg.extras["foi-email"])
    assert_equal(package_data["foi-phone"], pkg.extras["foi-phone"])
    # Themes and tags
    assert_equal(package_data["theme-primary"], pkg.extras["theme-primary"])
    assert_equal(set(package_data["theme-secondary"]), set(pkg.extras["theme-secondary"]))
    # Health and Education are from the primary and secondary themes, which
    # end up in the tags
    assert_equal(
        set(["tag1", "tag2", "a multi word tag", "Health", "Education"]),
        set(tag.name for tag in pkg.get_tags())
    )
    # Additional resources: same round-trip check as the timeseries ones
    expected_additional_keys = filter(lambda k: k.startswith("additional_resources"), package_data.keys())
    additional_resources = ckanext.dgu.lib.helpers.additional_resources(pkg.as_dict())
    assert_equal(len(additional_resources), 2)
    for key in expected_additional_keys:
        index, field = key.split("__")[1:]
        index = int(index)
        assert_equal(package_data[key], additional_resources[index][field])
    assert_equal(package_data["mandate"], pkg.extras["mandate"])
    # access_constraints is stored as the package's license id
    assert_equal(package_data["access_constraints"], pkg.license_id)
    # temporal coverage extras are stored in DB form; convert before comparing
    assert_equal(package_data["temporal_coverage-from"], DateType.db_to_form(pkg.extras["temporal_coverage-from"]))
    assert_equal(package_data["temporal_coverage-to"], DateType.db_to_form(pkg.extras["temporal_coverage-to"]))
    assert_in("England", pkg.extras["geographic_coverage"])
def test_0_reload(self):
    """Reload a package that is matched by its 'ref' extra.

    Exercises three reload scenarios: identical name+ref (in-place
    update), changed name with the same ref (rename unsupported, the
    original record keeps its name), and the original name with a new
    ref (a separate package is created under a munged name).
    """
    baseline = count_pkgs()

    # seed the initial package
    dataset = {
        'name': u'pkgname0',
        'title': u'Boris',
        'extras': {u'ref': 'boris'},
    }
    assert not model.Package.by_name(dataset['name'])
    CreateTestData.create_arbitrary([dataset])
    self.tsi.index()
    created = model.Package.by_name(dataset['name'])
    assert created
    assert count_pkgs() == baseline + 1, (count_pkgs() - baseline)

    # reload with the same name and ref: the existing record is updated
    dataset = {
        'name': u'pkgname0',
        'title': u'Boris 2',
        'extras': {u'ref': 'boris'},
    }
    self.loader.load_package(dataset)
    reloaded = model.Package.by_name(dataset['name'])
    assert reloaded
    assert reloaded.name == dataset['name']
    assert reloaded.title == dataset['title']
    assert count_pkgs() == baseline + 1, (count_pkgs() - baseline)

    # reload under a different name but the same ref: renaming is not
    # supported for now, so no package appears under the new name and
    # the original keeps its name (with the title updated)
    dataset = {
        'name': u'pkgname0changed',
        'title': u'Boris 3',
        'extras': {u'ref': 'boris'},
    }
    CreateTestData.flag_for_deletion(pkg_names=[dataset['name']])
    self.loader.load_package(dataset)
    assert count_pkgs() == baseline + 1, (count_pkgs() - baseline)
    renamed = model.Package.by_name(dataset['name'])
    assert renamed is None, renamed
    original = model.Package.by_name(u'pkgname0')
    assert original
    assert original.title == dataset['title']

    # reload under the original name but a different ref: treated as a
    # brand-new package; the loader munges the clashing name in-place
    previous = dataset
    dataset = {
        'name': u'pkgname0',
        'title': u'Boris 4',
        'extras': {u'ref': 'boris-4'},
    }
    CreateTestData.flag_for_deletion(pkg_names=[dataset['name']])
    self.loader.load_package(dataset)
    assert dataset['name'] == 'pkgname0_'
    untouched = model.Package.by_name(u'pkgname0')
    assert untouched
    assert untouched.title == u'Boris 3'
    fresh = model.Package.by_name(dataset['name'])
    assert fresh
    assert fresh.name == dataset['name']
    assert fresh.title == dataset['title']
    assert count_pkgs() == baseline + 2, (count_pkgs() - baseline)
def test_0_reload(self):
    """Reload ONS-style packages, matching on title + department.

    Checks that reloading with the same title and department replaces a
    resource carrying the same ONS id, appends a resource with a new ONS
    id, that a different department yields a separate package under the
    munged name 'pollution_', and that a changed invariant extra
    (country) is tolerated (value overwritten).

    Fixes: the failure messages of the description assertions previously
    indexed the Resource object (pkg.resources[0]['description']), which
    raises TypeError instead of reporting the mismatch; also removed an
    unused 'pkg_names' local and a leftover debug print.
    """
    num_pkgs = count_pkgs()
    # create the initial package, plus a decoy sharing the title but not
    # the department, to prove the loader does not match against it
    pkg_dict = {'name': u'pollution',
                'title': u'Pollution',
                'extras': {u'department': 'air',
                           u'country': 'UK',           # invariant
                           u'last_updated': 'Monday',  # variant
                           },
                'resources': [{'url': 'pollution.com/1',
                               'description': 'ons/id/1'}],
                }
    bogus_dict = {'name': u'bogus',
                  'title': u'Pollution',
                  'extras': {u'department': 'water',
                             u'country': 'UK',
                             u'last_updated': 'Monday',
                             },
                  'resources': [{'url': 'pollution.com/2',
                                 'description': 'ons/id/2'}],
                  }
    assert not model.Package.by_name(pkg_dict['name'])
    assert not model.Package.by_name(bogus_dict['name'])
    CreateTestData.create_arbitrary([pkg_dict, bogus_dict])
    self.tsi.index()
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 1, pkg.resources

    # load the same package: same title & department, updated resource
    # carrying the same ONS id - the existing resource is replaced
    pkg_dict = {'name': u'pollution',
                'title': u'Pollution',
                'extras': {u'department': 'air',
                           u'country': 'UK',            # invariant
                           u'last_updated': 'Tuesday',  # variant
                           },
                'resources': [{'url': 'pollution.com/id/1',
                               'description': 'ons/id/1'}],
                }
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert pkg.extras['country'] == pkg_dict['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 1, pkg.resources
    assert pkg.resources[0].url == pkg_dict['resources'][0]['url'], \
        pkg.resources[0].url
    assert pkg.resources[0].description == \
        pkg_dict['resources'][0]['description'], pkg.resources[0].description

    # load the same package again: same title & department, resource with
    # a new ONS id - the new resource is appended
    pkg_dict2 = {'name': u'pollution',
                 'title': u'Pollution',
                 'extras': {u'department': 'air',
                            u'country': 'UK',            # invariant
                            u'last_updated': 'Tuesday',  # variant
                            },
                 'resources': [{'url': 'pollution.com/id/3',
                                'description': 'ons/id/3'}],
                 }
    self.loader.load_package(pkg_dict2)
    pkg = model.Package.by_name(pkg_dict2['name'])
    assert pkg
    assert pkg.name == pkg_dict2['name']
    assert pkg.title == pkg_dict2['title']
    assert pkg.extras['country'] == pkg_dict2['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict2['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 2, pkg.resources
    assert_equal(pkg.resources[0].url, pkg_dict['resources'][0]['url'])
    assert pkg.resources[0].description == \
        pkg_dict['resources'][0]['description'], pkg.resources[0].description
    assert pkg.resources[1].url == pkg_dict2['resources'][0]['url'], \
        pkg.resources[1].url
    assert pkg.resources[1].description == \
        pkg_dict2['resources'][0]['description'], pkg.resources[1].description

    # load a package with a different department: treated as a different
    # package, created under the munged name 'pollution_'
    pkg_dict3 = {'name': u'pollution',
                 'title': u'Pollution',
                 'extras': {u'department': 'river',
                            u'country': 'UK',            # invariant
                            u'last_updated': 'Tuesday',  # variant
                            },
                 'resources': [{'url': 'pollution.com/id/3',
                                'description': 'Lots of pollution | ons/id/3'}],
                 }
    self.loader.load_package(pkg_dict3)
    CreateTestData.flag_for_deletion('pollution_')
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
    pkg = model.Package.by_name(u'pollution_')
    assert pkg
    assert pkg.extras['department'] == pkg_dict3['extras']['department']

    # load the same package but with a different country (an invariant
    # extra): should just produce a warning and overwrite the value
    pkg_dict4 = {'name': u'pollution',
                 'title': u'Pollution',
                 'extras': OrderedDict([
                     (u'department', 'air'),
                     (u'country', 'UK and France'),  # invariant
                     (u'last_updated', 'Tuesday'),   # variant
                 ]),
                 'resources': [OrderedDict([
                     ('url', 'pollution.com/id/3'),
                     ('description', 'Lots of pollution | ons/id/3'),
                 ])],
                 }
    self.loader.load_package(pkg_dict4)
    pkg = model.Package.by_name(pkg_dict4['name'])
    assert pkg
    assert pkg.name == pkg_dict4['name']
    assert pkg.title == pkg_dict4['title']
    assert pkg.extras['country'] == pkg_dict4['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict4['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 2, pkg.resources
    assert pkg.resources[0].url == pkg_dict['resources'][0]['url'], \
        pkg.resources[0].url
    assert pkg.resources[0].description == \
        pkg_dict['resources'][0]['description'], pkg.resources[0].description
    assert pkg.resources[1].url == pkg_dict4['resources'][0]['url'], \
        pkg.resources[1].url
    assert pkg.resources[1].description == \
        pkg_dict4['resources'][0]['description'], pkg.resources[1].description
def test_a_full_timeseries_dataset(self):
    """
    Tests the submission of a fully-completed timeseries dataset.

    Posts _EXAMPLE_TIMESERIES_DATA through the package form client and
    verifies the created package: core fields, publisher group,
    timeseries and additional resources, contact/FOI extras, themes and
    tags, mandate, license and coverage extras.
    """
    package_data = _EXAMPLE_TIMESERIES_DATA
    package_name = package_data['name']
    CreateTestData.flag_for_deletion(package_name)
    assert not self.get_package_by_name(package_name),\
        'Package "%s" already exists' % package_name
    # create package via form
    response = self._form_client.post_form(package_data)
    # ensure it's correct
    pkg = self.get_package_by_name(package_name)
    assert pkg, response.body
    assert_equal(package_data['title'], pkg.title)
    assert_equal(package_data['notes'], pkg.notes)
    publisher = pkg.as_dict()['groups'][0]
    assert_equal(package_data['groups__0__name'], publisher)
    # Extra data
    # Timeseries data: every timeseries_resources__N__field form key must
    # round-trip onto the corresponding created resource
    expected_timeseries_keys = filter(lambda k: k.startswith('timeseries_resources'),
                                      package_data.keys())
    timeseries_resources = ckanext.dgu.lib.helpers.timeseries_resources(pkg.as_dict())
    assert_equal(len(timeseries_resources), 4)
    for key in expected_timeseries_keys:
        index, field = key.split('__')[1:]
        index = int(index)
        assert_equal(package_data[key], timeseries_resources[index][field])
    # Publisher / contact details
    # The contact-email should not be an extra-field on the dataset as it's the
    # same as the publisher group's contact-email. ie - it hasn't been overridden.
    # The rest of the information should be in the extras fields
    assert_not_in('contact-email', pkg.extras)
    assert_equal(package_data['contact-name'], pkg.extras['contact-name'])
    assert_equal(package_data['contact-phone'], pkg.extras['contact-phone'])
    assert_equal(package_data['foi-name'], pkg.extras['foi-name'])
    assert_equal(package_data['foi-email'], pkg.extras['foi-email'])
    assert_equal(package_data['foi-phone'], pkg.extras['foi-phone'])
    # Themes and tags
    assert_equal(package_data['theme-primary'], pkg.extras['theme-primary'])
    assert_equal(set(package_data['theme-secondary']),
                 set(pkg.extras['theme-secondary']))
    # Health and Education are from the primary and secondary themes, which
    # end up in the tags
    assert_equal(set(['tag1', 'tag2', 'a multi word tag', 'Health', 'Education']),
                 set(tag.name for tag in pkg.get_tags()))
    # Additional resources: same round-trip check as the timeseries ones
    expected_additional_keys = filter(lambda k: k.startswith('additional_resources'),
                                      package_data.keys())
    additional_resources = ckanext.dgu.lib.helpers.additional_resources(pkg.as_dict())
    assert_equal(len(additional_resources), 2)
    for key in expected_additional_keys:
        index, field = key.split('__')[1:]
        index = int(index)
        assert_equal(package_data[key], additional_resources[index][field])
    assert_equal(package_data['mandate'], pkg.extras['mandate'])
    # access_constraints is stored as the package's license id
    assert_equal(package_data['access_constraints'], pkg.license_id)
    # temporal coverage extras are stored in DB form; convert before comparing
    assert_equal(package_data['temporal_coverage-from'],
                 DateType.db_to_form(pkg.extras['temporal_coverage-from']))
    assert_equal(package_data['temporal_coverage-to'],
                 DateType.db_to_form(pkg.extras['temporal_coverage-to']))
    assert_in('England', pkg.extras['geographic_coverage'])
def test_1_reload(self):
    """Reload with a synonym department, then a genuinely different one.

    A synonym department ('sky' for 'air') still matches the existing
    package, so the reload updates it in place; a truly different
    department ('river') creates a separate package under the munged
    name 'pollution_'.

    Fixes: the description assertion's failure message previously indexed
    the Resource object (pkg.resources[0]['description']), which raises
    TypeError instead of reporting the mismatch; also removed an unused
    'pkg_names' local.
    """
    num_pkgs = count_pkgs()
    # create the initial package, plus a decoy sharing the title but not
    # the department, to prove the loader does not match against it
    pkg_dict = {'name': u'pollution',
                'title': u'Pollution',
                'extras': {u'department': 'air',
                           u'country': 'UK',           # invariant
                           u'last_updated': 'Monday',  # variant
                           },
                'resources': [{'url': 'pollution.com/1',
                               'description': 'ons/id/1'}],
                }
    bogus_dict = {'name': u'bogus',
                  'title': u'Pollution',
                  'extras': {u'department': 'water',
                             u'country': 'UK',
                             u'last_updated': 'Monday',
                             },
                  'resources': [{'url': 'pollution.com/2',
                                 'description': 'ons/id/2'}],
                  }
    assert not model.Package.by_name(pkg_dict['name'])
    assert not model.Package.by_name(bogus_dict['name'])
    CreateTestData.create_arbitrary([pkg_dict, bogus_dict])
    self.tsi.index()
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 1, pkg.resources

    # load the similar package: same title, updated resource,
    # BUT synonym department - still matches, so updates in place
    pkg_dict = {'name': u'pollution',
                'title': u'Pollution',
                'extras': {u'department': 'sky',
                           u'country': 'UK',            # invariant
                           u'last_updated': 'Tuesday',  # variant
                           },
                'resources': [{'url': 'pollution.com/id/1',
                               'description': 'ons/id/1'}],
                }
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert pkg.extras['country'] == pkg_dict['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 1, pkg.resources
    assert pkg.resources[0].url == pkg_dict['resources'][0]['url'], \
        pkg.resources[0].url
    assert pkg.resources[0].description == \
        pkg_dict['resources'][0]['description'], pkg.resources[0].description

    # load a package with a genuinely different department: a new package
    # is created under the munged name 'pollution_'
    pkg_dict3 = {'name': u'pollution',
                 'title': u'Pollution',
                 'extras': {u'department': 'river',
                            u'country': 'UK',            # invariant
                            u'last_updated': 'Tuesday',  # variant
                            },
                 'resources': [{'url': 'pollution.com/id/3',
                                'description': 'Lots of pollution | ons/id/3'}],
                 }
    self.loader.load_package(pkg_dict3)
    CreateTestData.flag_for_deletion('pollution_')
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
    pkg = model.Package.by_name(u'pollution_')
    assert pkg
    assert pkg.extras['department'] == pkg_dict3['extras']['department']
def test_5_sync_update(self):
    """Sync an update to an existing package via the form fieldset.

    Creates a package with a full set of gov-form extras, then binds a
    new set of form parameters to it (including a rename to 'testname2'),
    syncs and commits, and checks that the stored package reflects the
    edited values: notes, tags, license, resource and all extras (with
    form dates converted to DB ISO format).
    """
    # create initial package
    init_data = [
        {
            "name": "test_sync",
            "title": "test_title",
            "extras": {
                "external_reference": "ref123",
                "date_released": "2008-11-28",
                "date_updated": "2008-04-01",
                "date_update_future": "1/7/2009",
                "geographic_granularity": "testgran",
                "geographic_coverage": "111000: England, Scotland, Wales",
                "temporal_granularity": "testtempgran",
                "temporal_coverage-from": "2007-01-08",
                "temporal_coverage-to": "2007-01-09",
                "national_statistic": "yes",
                "precision": "testprec",
                "taxonomy_url": "testtaxurl",
                "published_by": "Ealing PCT [2]",
                "published_via": "Department for Education [3]",
            },
        }
    ]
    CreateTestData.create_arbitrary(init_data)
    pkg = model.Package.by_name(u"test_sync")
    assert pkg
    # edit it with form parameters; the fieldset key prefix embeds the
    # id of the existing package being edited
    indict = _get_blank_param_dict(pkg=pkg, fs=get_fieldset())
    prefix = "Package-%s-" % pkg.id
    indict[prefix + "name"] = u"testname2"
    indict[prefix + "notes"] = u"some new notes"
    # the form expects tags as a one-element tuple
    indict[prefix + "tags"] = (u"russian, tolstoy",)
    indict[prefix + "license_id"] = u"gpl-3.0"
    indict[prefix + "date_released"] = u"27/11/2008"
    indict[prefix + "date_updated"] = u"1/4/2008"
    indict[prefix + "date_update_future"] = u"1/8/2010"
    indict[prefix + "geographic_granularity"] = u"regional"
    indict[prefix + "geographic_coverage-england"] = u"True"
    indict[prefix + "geographic_coverage-wales"] = u"True"
    indict[prefix + "temporal_granularity"] = u"year"
    indict[prefix + "temporal_coverage-from"] = u"6/2008"
    indict[prefix + "temporal_coverage-to"] = u"6/2009"
    indict[prefix + "national_statistic"] = u"True"
    indict[prefix + "precision"] = u"Nearest 1000"
    indict[prefix + "taxonomy_url"] = u"http:/somewhere/about.html"
    indict[prefix + "published_by"] = u"Department of Energy and Climate Change [4]"
    indict[prefix + "published_via"] = u"National Health Service [1]"
    indict[prefix + "resources-0-url"] = u"http:/1"
    indict[prefix + "resources-0-format"] = u"xml"
    indict[prefix + "resources-0-description"] = u"test desc"
    fs = get_fieldset().bind(pkg, data=indict)
    CreateTestData.flag_for_deletion(u"testname2")
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    outpkg = model.Package.by_name(u"testname2")
    assert outpkg.notes == indict[prefix + "notes"]
    # test tags
    taglist = [tag.name for tag in outpkg.tags]
    assert u"russian" in taglist, taglist
    assert u"tolstoy" in taglist, taglist
    # test licenses
    assert outpkg.license
    assert indict[prefix + "license_id"] == outpkg.license.id, outpkg.license.id
    # test resources
    assert len(outpkg.resources) == 1, outpkg.resources
    res = outpkg.resources[0]
    assert res.url == u"http:/1", res.url
    assert res.description == u"test desc", res.description
    assert res.format == u"xml", res.format
    # test gov fields: edited values should land in extras, with dates in
    # ISO format and geographic coverage re-encoded as a bit-string
    extra_keys = outpkg.extras.keys()
    reqd_extras = {
        "date_released": "2008-11-27",
        "date_updated": "2008-04-01",
        "date_update_future": "2010-08-01",
        "geographic_granularity": indict[prefix + "geographic_granularity"],
        "geographic_coverage": "101000: England, Wales",
        "temporal_granularity": indict[prefix + "temporal_granularity"],
        "temporal_coverage-from": "2008-06",
        "temporal_coverage-to": "2009-06",
        "national_statistic": "yes",
        "precision": indict[prefix + "precision"],
        "taxonomy_url": indict[prefix + "taxonomy_url"],
        "published_by": indict[prefix + "published_by"],
        "published_via": indict[prefix + "published_via"],
    }
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, 'Key "%s" not found in extras %r' % (reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, "Extra %s should equal %s but equals %s" % (
            reqd_extra_key,
            reqd_extra_value,
            outpkg.extras[reqd_extra_key],
        )
def test_4_sync_new(self):
    """Sync a brand-new package into the model via the form fieldset.

    Fills a blank form-parameter dict with every field, binds it to
    model.Package, validates, syncs and commits, then checks the created
    package: title/notes, tags (including a newly created tag), license,
    the single resource, and all the gov-form extras (with dates
    converted from form d/m/yyyy to DB ISO format).
    """
    newtagname = "newtagname"
    indict = _get_blank_param_dict(fs=get_fieldset())
    # 'Package--' is the fieldset key prefix for an as-yet-unsaved package
    prefix = "Package--"
    indict[prefix + "name"] = u"testname"
    indict[prefix + "title"] = u"testtitle"
    indict[prefix + "notes"] = u"some new notes"
    # the form expects tags as a one-element tuple
    indict[prefix + "tags"] = (u"russian tolstoy, " + newtagname,)
    indict[prefix + "license_id"] = u"gpl-3.0"
    indict[prefix + "date_released"] = u"27/11/2008"
    indict[prefix + "date_updated"] = u"1/4/2008"
    indict[prefix + "date_update_future"] = u"1/7/2010"
    indict[prefix + "geographic_granularity"] = u"regional"
    indict[prefix + "geographic_coverage-england"] = u"True"
    indict[prefix + "geographic_coverage-wales"] = u"True"
    indict[prefix + "temporal_granularity"] = u"year"
    indict[prefix + "temporal_coverage-from"] = u"6/2008"
    indict[prefix + "temporal_coverage-to"] = u"6/2009"
    indict[prefix + "national_statistic"] = u"True"
    indict[prefix + "precision"] = u"Nearest 1000"
    indict[prefix + "taxonomy_url"] = u"http:/somewhere/about.html"
    indict[prefix + "published_by"] = "Ealing PCT [2]"
    indict[prefix + "published_via"] = "Department for Education [3]"
    indict[prefix + "agency"] = u"Quango 1"
    indict[prefix + "resources-0-url"] = u"http:/1"
    indict[prefix + "resources-0-format"] = u"xml"
    indict[prefix + "resources-0-description"] = u"test desc"
    fs = get_fieldset().bind(model.Package, data=indict, session=model.Session)
    CreateTestData.flag_for_deletion(pkg_names=[u"testname"], tag_names=[u"russian", u"tolstoy"])
    model.repo.new_revision()
    assert fs.validate()
    fs.sync()
    model.repo.commit_and_remove()
    outpkg = model.Package.by_name(u"testname")
    assert outpkg.title == indict[prefix + "title"]
    assert outpkg.notes == indict[prefix + "notes"]
    # test tags
    taglist = [tag.name for tag in outpkg.tags]
    assert u"russian" in taglist, taglist
    assert u"tolstoy" in taglist, taglist
    assert newtagname in taglist
    # test licenses
    assert outpkg.license_id, outpkg
    assert outpkg.license, outpkg
    assert_equal(indict[prefix + "license_id"], outpkg.license.id)
    # test resources
    assert len(outpkg.resources) == 1, outpkg.resources
    res = outpkg.resources[0]
    assert res.url == u"http:/1", res.url
    assert res.description == u"test desc", res.description
    assert res.format == u"xml", res.format
    # test gov fields: form values should land in extras, with dates in
    # ISO format and geographic coverage encoded as a bit-string
    extra_keys = outpkg.extras.keys()
    reqd_extras = {
        "date_released": "2008-11-27",
        "date_updated": "2008-04-01",
        "date_update_future": u"2010-07-01",
        "geographic_granularity": indict[prefix + "geographic_granularity"],
        "geographic_coverage": "101000: England, Wales",
        "temporal_granularity": indict[prefix + "temporal_granularity"],
        "temporal_coverage-from": "2008-06",
        "temporal_coverage-to": "2009-06",
        "national_statistic": "yes",
        "precision": indict[prefix + "precision"],
        "taxonomy_url": indict[prefix + "taxonomy_url"],
        "published_by": indict[prefix + "published_by"],
        "published_via": indict[prefix + "published_via"],
    }
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, 'Key "%s" not found in extras %r' % (reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, "Extra '%s' should equal '%s' but equals '%s'" % (
            reqd_extra_key,
            reqd_extra_value,
            outpkg.extras[reqd_extra_key],
        )
def setup(self):
    """Create the form client and the example publisher groups,
    flagging the groups for clean-up after the test."""
    self._form_client = _PackageFormClient()
    CreateTestData.create_groups(_EXAMPLE_GROUPS, auth_profile='publisher')
    group_names = [group['name'] for group in _EXAMPLE_GROUPS]
    CreateTestData.flag_for_deletion(group_names=group_names)
def test_5_sync_update(self):
    """Sync an update to an existing package via the form fieldset.

    Creates a package with a full set of gov-form extras, then binds a
    new set of form parameters to it (including a rename to 'testname2'),
    syncs and commits, and checks that the stored package reflects the
    edited values: notes, tags, license, resource and all extras (with
    form dates converted to DB ISO format).
    """
    # create initial package
    init_data = [{
        'name': 'test_sync',
        'title': 'test_title',
        'extras': {
            'external_reference': 'ref123',
            'date_released': '2008-11-28',
            'date_updated': '2008-04-01',
            'date_update_future': '1/7/2009',
            'geographic_granularity': 'testgran',
            'geographic_coverage': '111000: England, Scotland, Wales',
            'temporal_granularity': 'testtempgran',
            'temporal_coverage-from': '2007-01-08',
            'temporal_coverage-to': '2007-01-09',
            'national_statistic': 'yes',
            'precision': 'testprec',
            'taxonomy_url': 'testtaxurl',
            'published_by': 'Ealing PCT [2]',
            'published_via': 'Department for Education [3]',
        },
    }]
    CreateTestData.create_arbitrary(init_data)
    pkg = model.Package.by_name(u'test_sync')
    assert pkg
    # edit it with form parameters; the fieldset key prefix embeds the
    # id of the existing package being edited
    indict = _get_blank_param_dict(pkg=pkg, fs=get_fieldset())
    prefix = 'Package-%s-' % pkg.id
    indict[prefix + 'name'] = u'testname2'
    indict[prefix + 'notes'] = u'some new notes'
    # NOTE: trailing comma makes this a one-element tuple, as the form expects
    indict[prefix + 'tags'] = u'russian, tolstoy',
    indict[prefix + 'license_id'] = u'gpl-3.0'
    indict[prefix + 'date_released'] = u'27/11/2008'
    indict[prefix + 'date_updated'] = u'1/4/2008'
    indict[prefix + 'date_update_future'] = u'1/8/2010'
    indict[prefix + 'geographic_granularity'] = u'regional'
    indict[prefix + 'geographic_coverage-england'] = u'True'
    indict[prefix + 'geographic_coverage-wales'] = u'True'
    indict[prefix + 'temporal_granularity'] = u'year'
    indict[prefix + 'temporal_coverage-from'] = u'6/2008'
    indict[prefix + 'temporal_coverage-to'] = u'6/2009'
    indict[prefix + 'national_statistic'] = u'True'
    indict[prefix + 'precision'] = u'Nearest 1000'
    indict[prefix + 'taxonomy_url'] = u'http:/somewhere/about.html'
    indict[prefix + 'published_by'] = u'Department of Energy and Climate Change [4]'
    indict[prefix + 'published_via'] = u'National Health Service [1]'
    indict[prefix + 'resources-0-url'] = u'http:/1'
    indict[prefix + 'resources-0-format'] = u'xml'
    indict[prefix + 'resources-0-description'] = u'test desc'
    fs = get_fieldset().bind(pkg, data=indict)
    CreateTestData.flag_for_deletion(u'testname2')
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    outpkg = model.Package.by_name(u'testname2')
    assert outpkg.notes == indict[prefix + 'notes']
    # test tags
    taglist = [tag.name for tag in outpkg.tags]
    assert u'russian' in taglist, taglist
    assert u'tolstoy' in taglist, taglist
    # test licenses
    assert outpkg.license
    assert indict[prefix + 'license_id'] == outpkg.license.id, outpkg.license.id
    # test resources
    assert len(outpkg.resources) == 1, outpkg.resources
    res = outpkg.resources[0]
    assert res.url == u'http:/1', res.url
    assert res.description == u'test desc', res.description
    assert res.format == u'xml', res.format
    # test gov fields: edited values should land in extras, with dates in
    # ISO format and geographic coverage re-encoded as a bit-string
    extra_keys = outpkg.extras.keys()
    reqd_extras = {
        'date_released': '2008-11-27',
        'date_updated': '2008-04-01',
        'date_update_future': '2010-08-01',
        'geographic_granularity': indict[prefix + 'geographic_granularity'],
        'geographic_coverage': '101000: England, Wales',
        'temporal_granularity': indict[prefix + 'temporal_granularity'],
        'temporal_coverage-from': '2008-06',
        'temporal_coverage-to': '2009-06',
        'national_statistic': 'yes',
        'precision': indict[prefix + 'precision'],
        'taxonomy_url': indict[prefix + 'taxonomy_url'],
        'published_by': indict[prefix + 'published_by'],
        'published_via': indict[prefix + 'published_via'],
    }
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, 'Key "%s" not found in extras %r' % (
            reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, \
            'Extra %s should equal %s but equals %s' % \
            (reqd_extra_key, reqd_extra_value, outpkg.extras[reqd_extra_key])