def _reset_data(self):
    """Rebuild the test database and recreate the package fixture used by edit tests."""
    model.Session.remove()
    model.repo.rebuild_db()
    CreateTestData.create()
    CreateTestData.create_arbitrary({
        'name': self.editpkg_name,
        'url': u'editpkgurl.com',
        'tags': [u'mytesttag'],
        'resources': [{
            'url': u'url escape: & umlaut: \xfc quote: "',
            'description': u'description escape: & umlaut: \xfc quote "',
        }],
        'admins': [u'testadmin'],
    })
    fixture = model.Package.by_name(self.editpkg_name)
    self.editpkg = fixture
    self.pkgid = fixture.id
    self.offset = url_for(controller='package', action='edit',
                          id=self.editpkg_name)
    self.editpkg = model.Package.by_name(self.editpkg_name)
    self.admin = model.User.by_name(u'testsysadmin')
    self.extra_environ_admin = {
        'REMOTE_USER': self.admin.name.encode('utf8')}
    self.extra_environ_russianfan = {'REMOTE_USER': '******'}
    self.res = None  # refreshed by setup
    model.Session.remove()
def test_new_package_without_resources(self):
    """A freshly created package with no resources still gets an up-to-date
    modification time."""
    self._mark_the_time()
    CreateTestData.create_arbitrary({'name': 'testpkg'})
    created = model.Package.get('testpkg')
    assert created
    assert self._has_modification_time_been_updated_since_the_mark(created)
    self._assert_post_determined_modification_time_is_correct(created)
def test_mapper_plugin_fired(self):
    """Loading the mapper plugin and creating a package fires its mapper hooks."""
    config['ckan.plugins'] = 'mapper_plugin'
    plugins.load_all(config)
    CreateTestData.create_arbitrary([{'name': u'testpkg'}])
    registry = PluginGlobals.env().plugin_registry
    plugin = registry['MapperPlugin'].__instance__
    # one addition for the package plus the resource group row that is
    # added automatically
    assert len(plugin.added) == 2
    assert plugin.added[0].name == 'testpkg'
def test_8_geo_coverage(self):
    """Geographic-coverage checkboxes ticked on the form sync back into the
    package's 'geographic_coverage' extra."""
    pkg_name = u"test_coverage"
    CreateTestData.create_arbitrary([{
        "name": pkg_name,
        "title": "test_title",
        "extras": {"geographic_coverage": "001000: England, Scotland, Wales"},
    }])
    pkg = model.Package.by_name(pkg_name)
    assert pkg
    # simulate the edit form: tick england, wales, scotland and global
    fs = get_fieldset()
    form_data = ckan.forms.get_package_dict(pkg, fs=fs)
    prefix = "Package-%s-" % pkg.id
    for region in ("england", "wales", "scotland", "global"):
        form_data[prefix + "geographic_coverage-" + region] = u"True"
    fs = fs.bind(pkg, data=form_data)
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    outpkg = model.Package.by_name(pkg_name)
    assert_equal(outpkg.extras["geographic_coverage"],
                 "111001: Global, Great Britain (England, Scotland, Wales)")
def setup_class(self):
    """Create standard fixtures plus one package with two searchable resources."""
    CreateTestData.create()
    self.ab = 'http://site.com/a/b.txt'
    self.cd = 'http://site.com/c/d.txt'
    self.package_fixture_data = {
        'name': u'testpkg',
        'title': 'Some Title',
        'url': u'http://blahblahblah.mydomain',
        'resources': [
            {'url': self.ab,
             'description': 'This is site ab.',
             'format': 'Excel spreadsheet',
             'alt_url': 'alt',
             'extras': {'size': '100'},
             'hash': 'abc-123'},
            {'url': self.cd,
             'description': 'This is site cd.',
             'format': 'Office spreadsheet',
             'alt_url': 'alt',
             'extras': {'size': '100'},
             'hash': 'qwe-456'},
        ],
        'tags': ['russion', 'novel'],
        'license_id': u'gpl-3.0',
        'extras': {'national_statistic': 'yes',
                   'geographic_coverage': 'England, Wales'},
    }
    CreateTestData.create_arbitrary(self.package_fixture_data)
    self.base_url = self.offset('/search/resource')
def test_mapper_plugin_fired_on_insert(self):
    """before_insert and after_insert fire, in order, when a package is created."""
    with plugins.use_plugin('mapper_plugin') as mapper_plugin:
        CreateTestData.create_arbitrary([{'name': u'testpkg'}])
        expected = [
            ('before_insert', 'testpkg'),
            ('after_insert', 'testpkg'),
        ]
        assert mapper_plugin.calls == expected
def test_2_field_publisher_none(self):
    """With no publisher set, published_via offers a (None) option, published_by
    does not, and validation then requires published_by."""
    # Create package
    CreateTestData.create_arbitrary({"name": u"test2", "title": u"Test2",
                                     "license": u"odc-pddl", "notes": u"some"})
    pkg = model.Package.by_name(u"test2")
    fs = get_fieldset()
    fs = fs.bind(pkg)
    out = fs.render()
    assert out
    for field, should_have_null_value in [(fs.published_by, False),
                                          (fs.published_via, True)]:
        pub_options = field.render()
        pub_options_readonly = field.render_readonly()
        assert "<select" in pub_options, pub_options
        assert_equal(
            ('<option selected="selected" value="">(None)</option>' in pub_options),
            should_have_null_value,
            "%s %r" % (field, pub_options),
        )
        if should_have_null_value:
            # published_by field is blank anyway because no value set.
            assert_equal("<p></p>", pub_options_readonly,
                         "%s %r" % (field, pub_options_readonly))
    indict = ckan.forms.get_package_dict(pkg, fs=fs)
    fs = get_fieldset().bind(pkg, data=indict)
    assert not fs.validate()
    assert len(fs.errors) == 1, fs.errors
    # BUG FIX: dict.has_key() was removed in Python 3; the `in` operator is
    # equivalent and also works on Python 2.
    assert fs.published_by in fs.errors, fs.errors.keys()
def test_edit(self):
    """Previewing an edit must not write anything to the stored package."""
    CreateTestData.create_arbitrary({
        u'name': u'name_before',
        u'title': u'title_before',
        u'url': u'testurl',
        u'resources': [{'url': 'dlu1', 'format': 'tf1'}],
        u'notes': u'testnotes',
        u'version': u'testversion',
        u'tags': ['one', 'two'],
        u'license': 'gpl-3.0',
        u'extras': {'key1': 'value1', 'key2': 'value2'},
    })
    pkg = model.Package.by_name(u'name_before')
    fs = ckan.forms.get_standard_fieldset(is_admin=False,
                                          user_editable_groups=[])
    base_dict = ckan.forms.get_package_dict(pkg=pkg, fs=fs,
                                            user_editable_groups=[])
    data = ckan.forms.add_to_package_dict(base_dict, self.params, pkg.id)
    fs = fs.bind(pkg, data=data)
    preview = PackageSaver()._preview_pkg(fs, u'name_before', pkg.id)
    self._check_preview_pkg(preview, self.params)
    # Check nothing has changed in the model
    assert model.Package.by_name(u'name_before')
    assert not model.Package.by_name(u'name_after')
    assert not model.Tag.by_name(u'three')
    leaked = model.Session.query(model.PackageResource).filter_by(
        url=u'dlu2c').first()
    assert leaked is None, leaked
def test_mapper_plugin_fired(self):
    """Mapper hooks keep their record of additions even after the fixture
    data is deleted again."""
    with plugins.use_plugin('mapper_plugin') as mapper_plugin:
        CreateTestData.create_arbitrary([{'name': u'testpkg'}])
        # remove this data
        CreateTestData.delete()
        # the package row plus the automatically-added resource group row
        assert len(mapper_plugin.added) == 2
        assert mapper_plugin.added[0].name == 'testpkg'
def setup_class(self):
    """Create ONS-flavoured package fixtures and a WSGI API client.

    Builds packages exercising combinations of an ONS import_source and
    the national_statistic flag, plus one fully-populated local-authority
    package, then authenticates a WsgiCkanClient as the extra test user.
    """
    # create test data
    username = '******'
    self.pkgs = [
        # ONS-sourced and designated "National Statistics"
        {'name': "ons_pkg",
         "extras": {
             "import_source": "ONS-ons_data_7_days_to_2011-05-10",
             "notes": "<p>Designation: National Statistics\n</p>",
             "national_statistic": "yes",
         }
         },
        # ONS-sourced but notes lack the "National Statistics" wording
        {'name': "ons_but_not_ns",
         "extras": {
             "import_source": "ONS-ons_data_7_days_to_2011-05-10",
             "notes": "<p>Designation: Excellent Statistics\n</p>",
             "national_statistic": "yes",
         }
         },
        {'name': "not_ns_or_ons",
         "extras": {
             "import_source": "ONS-ons_data_7_days_to_2011-05-10",
             "national_statistic": "no",
         }
         },
        {'name': "not_ns",
         "extras": {
             "import_source": "",
             "national_statistic": "no",
         }
         },
        # fully-populated local-authority package
        # NOTE(review): the title is a non-unicode literal containing \u00a3;
        # on Python 2 that stays a literal backslash-u sequence rather than
        # a pound sign — confirm this is intended.
        {'name': "local-authority-spend-over-500-london-borough-of-hackney",
         "title": "Payments to suppliers with a value over \u00a3500 from London Borough of Hackney",
         "extras": {
             "temporal_coverage-to": "2011-06-30",
             "temporal_coverage-from": "2010-09-01",
             "temporal_granularity": "month",
             "date_released": "2010-09-14",
             "geographic_coverage": "000000: ",
             "taxonomy_url": "",
             "openness_score": "0",
             "external_reference": "",
             "date_updated": "2011-07-26",
             "published_via": "",
             "agency": "",
             "precision": "per cent to two decimal places",
             "geographic_granularity": "local authority",
             "department": "London Borough of Hackney",
             "published_by": "London Borough of Hackney [15165]",
             "national_statistic": "yes",
             "openness_score_last_checked": "2011-06-06T17:02:46.802271",
             "mandate": "",
             "date_update_future": "",
             "update_frequency": "monthly",
             "categories": "Government"}
         },
    ]
    CreateTestData.create_arbitrary(self.pkgs, extra_user_names=[username])
    user = model.User.by_name(unicode(username))
    assert user
    self.testclient = WsgiCkanClient(self.app, api_key=user.apikey)
def setup_class(self):
    """Skip unless search is supported; build two packages whose resources
    overlap (cd appears in both) for resource-search tests."""
    if not is_search_supported():
        raise SkipTest("Search not supported")
    self.ab = 'http://site.com/a/b.txt'
    self.cd = 'http://site.com/c/d.txt'
    self.ef = 'http://site.com/e/f.txt'
    pkg1_resources = [
        {'url': self.ab, 'description': 'This is site ab.',
         'format': 'Excel spreadsheet', 'hash': 'abc-123',
         'alt_url': 'alt1', 'extras': {'size_extra': '100'}},
        {'url': self.cd, 'description': 'This is site cd.',
         'format': 'Office spreadsheet', 'hash': 'qwe-456',
         'alt_url': 'alt2', 'extras': {'size_extra': '200'}},
    ]
    pkg2_resources = [
        {'url': self.cd, 'alt_url': 'alt1', 'description': 'This is site cd.'},
        {'url': self.ef, 'description': 'This is site ef.'},
        {'url': self.ef, 'description': 'This is site gh.'},
        {'url': self.ef, 'description': 'This is site ij.'},
    ]
    self.pkgs = [
        {'name': 'pkg1', 'resources': pkg1_resources},
        {'name': 'pkg2', 'resources': pkg2_resources},
    ]
    CreateTestData.create_arbitrary(self.pkgs)
def setup_class(cls):
    """Create more groups than fit on one listing page.

    The number of entities per page is hardcoded into the controllers, so
    create enough here to exercise pagination.
    """
    cls.num_groups = 22  # CS: nasty_string ignore
    group_names = [u"group_%s" % str(n).zfill(2)
                   for n in range(cls.num_groups)]
    CreateTestData.create_arbitrary([], extra_group_names=group_names)
def setup_class(cls):
    """Extend TestAuth's setup with group-hierarchy data, org permissions,
    per-user api keys and one dataset in the NHS group."""
    TestAuth.setup_class()
    CreateTestData.create_group_hierarchy_test_data()
    for user in model.Session.query(model.User):
        cls.apikeys[user.name] = str(user.apikey)
    new_authz.CONFIG_PERMISSIONS.update(ORG_HIERARCHY_PERMISSIONS)
    dataset = {"name": "adataset", "groups": ["national-health-service"]}
    CreateTestData.create_arbitrary(package_dicts=[dataset],
                                    extra_user_names=["john"])
def setup(self):
    """Create the 'lichfield-councillors' package fixture owned by tester1
    and an Authorization header for API calls as that user."""
    self.user_name = u'tester1'
    # NOTE(review): extras contain both 'geographical_granularity' and
    # 'geographic_granularity' — confirm which spelling is canonical.
    self.pkg_dict = {
        "name": u"lichfield-councillors",
        "title": "Councillors",
        "version": None,
        "url": "http://www.lichfielddc.gov.uk/data",
        "author": "Democratic and Legal",
        "author_email": None,
        "maintainer": "Web Team",
        "maintainer_email": "*****@*****.**",
        "notes": "A list of Lichfield District Councillors, together with contact details, political party and committees",
        "license_id": "localauth-withrights",
        "license": "OKD Compliant::Local Authority Copyright with data.gov.uk rights",
        "tags": ["committees", "cool", "councillors", "democracy", "lichfield", "meetings"],
        "groups": ["ukgov"],
        "extras": {
            "temporal_coverage-from": "",
            "date_updated": "2010-03-29",
            "temporal_coverage_to": "",
            "import_source": "COSPREAD-cospread-2010-03-31mk2.csv",
            "geographical_granularity": "local authority",
            "temporal_granularity": "",
            "agency": "",
            "geographic_granularity": "",
            "temporal_coverage-to": "",
            "published_by": "Scotland Office",
            "precision": "",
            "temporal_coverage_from": "",
            "taxonomy_url": "",
            "mandate": "",
            "categories": "",
            "geographic_coverage": "010000: Scotland",
            "external_reference": "",
            "national_statistic": "no",
            "date_update_future": "",
            "update_frequency": "Daily",
            "date_released": "2009-08-01"},
        "resources": [{
            "url": "http://www.lichfielddc.gov.uk/site/custom_scripts/councillors_xml.php?viewBy=name",
            "format": "Other XML",
            "description": "",
            "hash": ""}]}
    CreateTestData.create_arbitrary([self.pkg_dict],
                                    extra_user_names=[self.user_name])
    self.package_name = self.pkg_dict['name']
    test_user = self.get_user_by_name(unicode(self.user_name))
    # API requests authenticate via the user's apikey string
    self.extra_environ = {'Authorization': str(test_user.apikey)}
def setup_class(cls):
    """Create one test package per letter in 'abcd12'."""
    # create data
    model.repo.init_db()
    pkgs = []
    for letter in "abcd12":
        for i in range(0, 1):
            # BUG FIX: a `name` local was computed here with the same
            # expression and never used; build the dict directly.
            pkgs.append({"name": u"testpackage_%s_%s" % (letter, i),
                         "title": u"%s Testpackage %s" % (letter, i)})
    cls.num_pkgs = len(pkgs)
    CreateTestData.create_arbitrary(pkgs)
def test_edit_package(self):
    """Editing a package's notes keeps its search-index modification time correct."""
    CreateTestData.create_arbitrary({'name': 'testpkg5',
                                     'resources': [{'url': 'http://ff.com'}]})
    pkg = model.Package.get('testpkg5')
    model.repo.new_revision()
    pkg.notes = 'A change'
    model.repo.commit_and_remove()
    edited = model.Package.get('testpkg5')
    self._assert_search_index_has_correct_modification_time(edited)
def test_0_reload(self):
    """Loader reload semantics keyed on the 'ref' extra.

    Same name + same ref updates in place; a rename with the same ref is
    not supported (the original name is kept); same name + different ref
    forces a new package with a '_'-suffixed name.
    """
    # create initial package
    num_pkgs = count_pkgs()
    pkg_dict = {'name': u'pkgname0', 'title': u'Boris',
                'extras': {u'ref': 'boris'}}
    assert not model.Package.by_name(pkg_dict['name'])
    CreateTestData.create_arbitrary([pkg_dict])
    self.tsi.index()
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # load the package with same name and ref
    pkg_dict = {'name': u'pkgname0', 'title': u'Boris 2',
                'extras': {u'ref': 'boris'}}
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # load the package with different name, same ref
    pkg_dict = {'name': u'pkgname0changed', 'title': u'Boris 3',
                'extras': {u'ref': 'boris'}}
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # for now we do not support renaming
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg is None, pkg
    pkg = model.Package.by_name(u'pkgname0')
    assert pkg
    assert pkg.title == pkg_dict['title']
    # load the package with same name, different ref - new package
    other_pkg_dict = pkg_dict
    pkg_dict = {'name': u'pkgname0', 'title': u'Boris 4',
                'extras': {u'ref': 'boris-4'}}
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    # NOTE(review): the assertion below implies load_package mutates
    # pkg_dict['name'] to the suffixed name — confirm in the loader.
    assert pkg_dict['name'] == 'pkgname0_'
    orig_pkg = model.Package.by_name(u'pkgname0')
    assert orig_pkg
    assert orig_pkg.title == u'Boris 3'
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
def setup(self):
    """Create service/dataset fixtures, harvest objects and a partially
    filled coupled-resource table for the coupling-update tests."""
    # Create fixtures
    CreateTestData.create_arbitrary([
        # services declare the datasets they couple to via href lists
        {'name': 'serviceA',
         'extras': {'coupled-resource': json.dumps(
             [{'href': [ref_prefix + 'Bref']},
              {'href': [ref_prefix + 'Href']},
              {'href': [ref_prefix + 'Eref']}]),
             'resource-type': 'service'}},
        {'name': 'serviceF',
         'extras': {'coupled-resource': json.dumps(
             [{'href': [ref_prefix + 'Dref']}]),
             'resource-type': 'service'}},
        {'name': 'serviceG',
         'extras': {'coupled-resource': json.dumps(
             [{'href': [ref_prefix + 'Gref']}]),
             'resource-type': 'service'}},
        {'name': 'datasetB', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetC', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetD', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetE', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetG', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetH', 'extras': {'resource-type': 'dataset'}},
        {'name': 'serviceD',
         'extras': {'coupled-resource': json.dumps(
             [{'href': [ref_prefix + 'Dref']}]),
             'resource-type': 'service'}},
    ])
    self._create_user()
    self._create_publisher()
    self.source, self.job = self._create_source_and_job()
    # harvest objects give some of the datasets a known ref
    self._create_harvest_object('datasetB', ref='Bref')
    self._create_harvest_object('datasetC', ref='Cref')
    self._create_harvest_object('datasetD', ref='Dref')
    self._create_harvest_object('datasetE', ref='Eref')
    # Create a partially-filled coupling table
    self._create_coupled_resource('serviceA', 'Bref', 'datasetB')
    self._create_coupled_resource('serviceA', 'Cref', 'datasetC')
    self._create_coupled_resource(None, 'Dref', 'datasetD')  # service missing
    self._create_coupled_resource('serviceA', 'Eref', None)  # dataset missing
    self._create_coupled_resource('serviceF', 'Dref', 'datasetD')
    model.Session.commit()
    model.Session.remove()
    self.couples_before = self._get_coupled_resources()
    pprint(self.couples_before)
    assert_equal(len(self.couples_before), 5)
def setup_class(cls):
    """Create more users than fit on one listing page.

    The number of entities per page is hardcoded into the controllers, so
    create enough here to exercise pagination.
    """
    cls.num_users = 21  # CS: nasty_string ignore
    user_names = [u'user_%s' % str(n).zfill(2)
                  for n in range(cls.num_users)]
    CreateTestData.create_arbitrary([], extra_user_names=user_names)
def _new_pkg(self, index):
    """Create testpkg<index> administered by annafan; return (pkg_name, user)."""
    pkg_name = u"testpkg%i" % index
    CreateTestData.create_arbitrary([{"name": pkg_name,
                                      "admins": [u"annafan"]}])
    assert model.Package.by_name(pkg_name)
    assert model.User.by_name(u"annafan")
    model.repo.commit_and_remove()
    # re-fetch the user after commit_and_remove
    user = model.User.by_name(u"annafan")
    return pkg_name, user
def test_new_resource(self):
    """Adding a resource keeps the package's search-index modification time correct."""
    CreateTestData.create_arbitrary({'name': 'testpkg4',
                                     'resources': [{'url': 'http://ff.com'}]})
    model.repo.new_revision()
    pkg = model.Package.get("testpkg4")
    pkg.add_resource(url="http://fake_url/", format="HTML",
                     description="A test resource")
    model.Session.add(pkg)
    model.repo.commit_and_remove()
    refreshed = model.Package.get("testpkg4")
    self._assert_search_index_has_correct_modification_time(refreshed)
def test_15_tag_autocomplete_tag_with_spaces(self):
    """Asserts autocomplete finds tags that contain spaces."""
    CreateTestData.create_arbitrary([
        {"name": u"package-with-tag-that-has-a-space-1",
         "tags": [u"with space"],
         "license": "never_heard_of_it"},
    ])
    postparams = "%s=1" % json.dumps({"q": "w"})
    response = self.app.post("/api/action/tag_autocomplete",
                             params=postparams)
    body = json.loads(response.body)
    assert body["success"]
    assert "with space" in body["result"], body["result"]
def test_15_tag_autocomplete_is_case_insensitive(self):
    """Tag autocomplete matches regardless of letter case."""
    CreateTestData.create_arbitrary([{
        'name': u'package-with-tag-that-has-a-capital-letter-3',
        'tags': [u'MIX of CAPITALS and LOWER case'],
        'license': 'never_heard_of_it',
    }])
    postparams = '%s=1' % json.dumps({'q': u'lower case'})
    response = self.app.post('/api/action/tag_autocomplete',
                             params=postparams)
    body = json.loads(response.body)
    assert body['success']
    assert 'MIX of CAPITALS and LOWER case' in body['result'], body['result']
def setup_class(self):
    """Index two tagged packages and prepare the SQL search backend."""
    indexer = TestSearchIndexer()
    CreateTestData.create_arbitrary([
        {'name': u'test1-penguin-canary',
         'tags': u'canary goose squirrel wombat wombat'},
        {'name': u'test2-squirrel-squirrel-canary-goose',
         'tags': u'penguin wombat'},
    ])
    self.pkg_names = [u'test1-penguin-canary',
                      u'test2-squirrel-squirrel-canary-goose']
    indexer.index()
    self.backend = get_backend(backend='sql')
def _new_pkg(self, index):
    """Create testpkg<index> with annafan as admin; return (pkg_name, user)."""
    name = u'testpkg%i' % index
    CreateTestData.create_arbitrary([{'name': name, 'admins': [u'annafan']}])
    pkg = model.Package.by_name(name)
    user = model.User.by_name(u'annafan')
    assert pkg
    assert user
    model.repo.commit_and_remove()
    user = model.User.by_name(u'annafan')  # re-fetch after commit_and_remove
    return name, user
def setup_class(cls):
    """Create ten packages for each letter in 'abcd12' (60 in total)."""
    # create data
    model.repo.init_db()
    pkgs = [{'name': u'testpackage_%s_%s' % (letter, i),
             'title': u'%s Testpackage %s' % (letter, i)}
            for letter in 'abcd12'
            for i in range(0, 10)]
    cls.num_pkgs = len(pkgs)
    CreateTestData.create_arbitrary(pkgs)
def test_edit_package(self):
    """Edit a package's notes and check the recorded modification time.

    NOTE(review): unlike the creation test, this asserts the modification
    time has NOT moved past the mark after the edit — presumably an edit
    to `notes` is not counted as a metadata modification; confirm intent.
    """
    CreateTestData.create_arbitrary({'name': 'testpkg5', 'resources': [{'url': 'http://ff.com'}] })
    pkg = model.Package.get('testpkg5')
    self._mark_the_time()
    model.repo.new_revision()
    pkg.notes = 'A change'
    model.repo.commit_and_remove()
    pkg = model.Package.get('testpkg5')
    assert not self._has_modification_time_been_updated_since_the_mark(pkg)
    self._assert_post_determined_modification_time_is_correct(pkg)
def setup_class(cls):
    """Purge the default user, then create over a page's worth of users."""
    # Delete default user as it appears in the first page of results
    model.User.by_name(u"logged_in").purge()
    model.repo.commit_and_remove()
    # the per-page entity count is hardcoded into the controllers, so
    # create enough users here to exercise pagination
    cls.num_users = 21  # CS: nasty_string ignore
    names = [u"user_%s" % str(n).zfill(2) for n in range(cls.num_users)]
    CreateTestData.create_arbitrary([], extra_user_names=names)
def setup_class(self):
    """Create the loader test user and a CKAN API client.

    Uses an in-process WSGI client when WSGI_CLIENT is set; otherwise
    spawns a real CKAN server and talks HTTP to it.
    """
    parent = super(TestLoaderBase, self)
    if hasattr(parent, 'setup_class'):
        parent.setup_class()
    CreateTestData.create_arbitrary([], extra_user_names=[USER])
    user = model.User.by_name(USER)
    assert user
    if WSGI_CLIENT:
        self.testclient = WsgiCkanClient(self.app, api_key=user.apikey)
    else:
        self.sub_proc = self._start_ckan_server('test.ini')
        self.testclient = CkanClient(
            base_location='http://localhost:5000/api',
            api_key=user.apikey)
        self._wait_for_url(url='http://localhost:5000/api')
def test_6_sync_update_restrict(self):
    """Syncing a restricted fieldset updates editable fields only.

    name, department and national_statistic are restricted: attempts to
    change them through the restricted form must be ignored, while the
    unrestricted notes field is updated.
    """
    # create initial package
    pkg_name = u'test_sync_restrict'
    init_data = [{
        'name': pkg_name,
        'title': 'test_title',
        'extras': {
            'notes': 'Original notes',
            'national_statistic': 'yes',
            'department': 'dosac',
        },
    }]
    CreateTestData.create_arbitrary(init_data)
    pkg = model.Package.by_name(pkg_name)
    assert pkg
    # edit it with form parameters
    indict = _get_blank_param_dict(pkg=pkg, fs=get_fieldset(restrict=1))
    prefix = 'Package-%s-' % pkg.id
    indict[prefix + 'notes'] = u'some new notes'
    # try changing restricted params anyway
    new_name = u'testname4'
    indict[prefix + 'name'] = new_name
    indict[prefix + 'department'] = u'testdept'
    # don't supply national_statistic param at all
    fs = get_fieldset(restrict=1).bind(pkg, data=indict)
    CreateTestData.flag_for_deletion(new_name)
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()
    assert not model.Package.by_name(new_name)  # unchanged
    outpkg = model.Package.by_name(pkg_name)  # unchanged
    assert outpkg
    # test sync worked
    assert outpkg.notes == indict[prefix + 'notes']
    # test gov fields
    extra_keys = outpkg.extras.keys()
    reqd_extras = {
        'national_statistic': 'yes',  # unchanged
        'department': init_data[0]['extras']['department'],  # unchanged
    }
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, \
            'Key "%s" not found in extras %r' % (reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, \
            'Extra %s should equal %s but equals %s' % \
            (reqd_extra_key, reqd_extra_value,
             outpkg.extras[reqd_extra_key])
def setup_class(cls):
    """Create group-hierarchy auth fixtures, per-user api keys, the site
    sysadmin user, one NHS dataset and the test app."""
    super(TestAuth, cls).setup_class()
    CreateTestData.create_group_hierarchy_test_data()
    cls.apikeys = {}
    for user in model.Session.query(model.User):
        cls.apikeys[user.name] = str(user.apikey)
    cls.sysadmin = get_action('get_site_user')(
        {'model': model, 'ignore_auth': True}, {})
    dataset = {'name': 'adataset', 'groups': ['national-health-service']}
    CreateTestData.create_arbitrary(package_dicts=[dataset],
                                    extra_user_names=['john'])
    cls.app = cls._get_test_app()
def _reset_data(self):
    """Rebuild the database and recreate the edit-test package fixture."""
    model.Session.remove()
    model.repo.rebuild_db()
    CreateTestData.create()
    CreateTestData.create_arbitrary({
        'name': self.editpkg_name,
        'url': u'editpkgurl.com',
        'tags': [u'mytesttag'],
        'resources': [{
            'url': u'url escape: & umlaut: \xfc quote: "',
            'description': u'description escape: & umlaut: \xfc quote "',
        }],
    })
    fixture = model.Package.by_name(self.editpkg_name)
    self.editpkg = fixture
    self.pkgid = fixture.id
    self.offset = url_for(controller='package', action='edit',
                          id=self.editpkg_name)
    self.editpkg = model.Package.by_name(self.editpkg_name)
    self.admin = model.User.by_name(u'testsysadmin')
    self.extra_environ_admin = {
        'REMOTE_USER': self.admin.name.encode('utf8')}
    self.extra_environ_russianfan = {'REMOTE_USER': '******'}
    self.res = None  # refreshed by setup
    model.Session.remove()
def test_read_internal_links(self):
    """dataset:/tag:/group: references in notes render as hyperlinks; the
    decoy scheme must not."""
    # BUG FIX: a trailing comma made pkg_name a 1-tuple, not a unicode string.
    pkg_name = u'link-test'
    CreateTestData.create_arbitrary([
        {'name': pkg_name,
         'notes': 'Decoy link here: decoy:decoy, real links here: dataset:pkg-1, ' \
                  'tag:tag_1 group:test-group-1 and a multi-word tag: tag:"multi word with punctuation."',
         }
    ])
    offset = url_for(controller='package', action='read', id=pkg_name)
    res = self.app.get(offset)

    def check_link(res, controller, id):
        id_in_uri = id.strip('"').replace(
            ' ', '%20')  # remove quotes and percent-encode spaces
        # BUG FIX: the replacement text was a plain '"' (a no-op); in the
        # rendered HTML the quote appears as the &quot; entity.
        self.check_tag_and_data(
            res, 'a ', '%s/%s' % (controller, id_in_uri),
            '%s:%s' % (controller, id.replace('"', '&quot;')))

    check_link(res, 'dataset', 'pkg-1')
    check_link(res, 'tag', 'tag_1')
    check_link(res, 'tag', '"multi word with punctuation."')
    check_link(res, 'group', 'test-group-1')
    assert 'decoy</a>' not in res, res
    assert 'decoy"' not in res, res
def test_2_field_publisher_not_listed(self):
    """Publishers not in the known list still render as the selected option
    and pass validation."""
    # Create package
    CreateTestData.create_arbitrary({
        'name': u'test3',
        'title': u'Test3',
        'license': u'odc-pddl',
        'notes': u'some',
        'extras': {
            'published_by': u'Unheard-of Department [56]',
            'published_via': u'Another Unheard-of Department [57]',
        }
    })
    pkg = model.Package.by_name(u'test3')
    fs = get_fieldset().bind(pkg)
    rendered = fs.render()
    assert rendered
    cases = [
        (fs.published_by, u'Unheard-of Department [56]',
         u'Unheard-of Department *'),
        (fs.published_via, u'Another Unheard-of Department [57]',
         u'Another Unheard-of Department *'),
    ]
    for field, numbered_publisher, publisher in cases:
        pub_options = field.render()
        field.render_readonly()
        assert '<select' in pub_options, pub_options
        expected_selected_field = \
            '<option selected="selected" value="%s">%s</option>' % (
                numbered_publisher, publisher)
        assert expected_selected_field in pub_options, \
            'In field %s could not find %r:\n%r' % (
                field, expected_selected_field, pub_options)
    indict = ckan.forms.get_package_dict(pkg, fs=fs)
    fs = get_fieldset().bind(pkg, data=indict)
    assert fs.validate(), fs.errors
def assert_edit(user_name, publisher_name, status=200):
    """Create a package (optionally in publisher_name's group), then PUT an
    edit via the API as user_name and expect the given HTTP status."""
    # create a package to edit
    pkg_name = 'test3' + user_name + publisher_name
    test_pkg = self.get_package_fixture(pkg_name)
    test_pkg['groups'] = [publisher_name] if publisher_name else []
    pkg = CreateTestData.create_arbitrary(test_pkg)
    # edit it
    offset = '/api/rest/package/%s' % pkg_name
    edited_pkg = copy.deepcopy(test_pkg)
    edited_pkg['title'] += ' edited'
    postparams = '%s=1' % json.dumps(edited_pkg)
    if user_name:
        apikey = str(model.User.by_name(user_name).apikey)
        extra_environ = {'Authorization': apikey}
    else:
        extra_environ = {}
    result = self.app.put(offset, postparams, status=[status],
                          extra_environ=extra_environ)
def test_edit_package(self):
    """A PUT to the package API updates the stored package and returns the
    edited representation."""
    # create the package to be edited
    pkg_name = 'test4'
    fixture = self.get_package_fixture(pkg_name)
    pkg = CreateTestData.create_arbitrary(fixture)
    # edit it as the sysadmin
    offset = '/api/rest/package/%s' % pkg_name
    edited_pkg = copy.deepcopy(fixture)
    edited_pkg['title'] = 'Edited title'
    postparams = '%s=1' % json.dumps(edited_pkg)
    result = self.app.put(offset, postparams, status=[200],
                          extra_environ=self.extra_environ_sysadmin)
    # check returned dict is correct
    res = json.loads(result.body)
    assert_equal(res['name'], fixture['name'])
    assert res['id']
    assert_equal(res['title'], 'Edited title')
    assert_equal(res['license_id'], fixture['license_id'])
    assert res['organization']['name'] == fixture['groups'][0]
    assert_equal(res['extras'].get('temporal_coverage-to'),
                 fixture['extras']['temporal_coverage-to'])
    assert_equal(res['resources'][0].get('description'),
                 fixture['resources'][0]['description'])
    assert_equal(set(res['tags']), set(fixture['tags']))
    # check package was edited ok
    pkg = model.Package.by_name(fixture['name'])
    pkg_dict = get_action('package_show')(self.context,
                                          {'id': fixture['name']})
    assert_equal(pkg.name, fixture['name'])
    assert_equal(pkg.title, 'Edited title')
    assert pkg.get_organization().name == fixture['groups'][0]
    assert_equal(pkg.extras.get('temporal_coverage-to'),
                 fixture['extras']['temporal_coverage-to'])
    assert_equal(pkg.resources[0].description,
                 fixture['resources'][0]['description'])
    assert_equal(set([tag['name'] for tag in pkg_dict['tags']]),
                 set(fixture['tags']))
def create_common_fixtures(self):
    """Create the standard test fixtures plus this test's extra user."""
    CreateTestData.create(commit_changesets=self.commit_changesets)
    CreateTestData.create_arbitrary([],
                                    extra_user_names=[self.user_name])
def create_package(self, data=None, **kwds):
    """Create a single package from `data`, or from keyword arguments when
    `data` is empty/None.

    Todo: A simpler method for just creating a package.
    """
    # BUG FIX: the default was a shared mutable dict (`data={}`) — the
    # classic mutable-default pitfall. `None` is used as the sentinel
    # instead; `data or kwds` behaves identically for all callers since
    # both None and {} are falsy.
    CreateTestData.create_arbitrary(package_dicts=[data or kwds])
def setup_class(self):
    """Create ONS-flavoured package fixtures and a WSGI API client.

    Same fixture set as the sibling ONS setup: four packages covering
    import_source/national_statistic combinations plus one fully-populated
    local-authority package.
    """
    # create test data
    username = '******'
    self.pkgs = [
        # ONS-sourced and designated "National Statistics"
        {
            'name': "ons_pkg",
            "extras": {
                "import_source": "ONS-ons_data_7_days_to_2011-05-10",
                "notes": "<p>Designation: National Statistics\n</p>",
                "national_statistic": "yes",
            }
        },
        # ONS-sourced but notes lack the "National Statistics" wording
        {
            'name': "ons_but_not_ns",
            "extras": {
                "import_source": "ONS-ons_data_7_days_to_2011-05-10",
                "notes": "<p>Designation: Excellent Statistics\n</p>",
                "national_statistic": "yes",
            }
        },
        {
            'name': "not_ns_or_ons",
            "extras": {
                "import_source": "ONS-ons_data_7_days_to_2011-05-10",
                "national_statistic": "no",
            }
        },
        {
            'name': "not_ns",
            "extras": {
                "import_source": "",
                "national_statistic": "no",
            }
        },
        # fully-populated local-authority package
        # NOTE(review): non-unicode title literal containing \u00a3 — on
        # Python 2 this stays a literal backslash-u sequence; confirm.
        {
            'name': "local-authority-spend-over-500-london-borough-of-hackney",
            "title": "Payments to suppliers with a value over \u00a3500 from London Borough of Hackney",
            "extras": {
                "temporal_coverage-to": "2011-06-30",
                "temporal_coverage-from": "2010-09-01",
                "temporal_granularity": "month",
                "date_released": "2010-09-14",
                "geographic_coverage": "000000: ",
                "taxonomy_url": "",
                "openness_score": "0",
                "external_reference": "",
                "date_updated": "2011-07-26",
                "published_via": "",
                "agency": "",
                "precision": "per cent to two decimal places",
                "geographic_granularity": "local authority",
                "department": "London Borough of Hackney",
                "published_by": "London Borough of Hackney [15165]",
                "national_statistic": "yes",
                "openness_score_last_checked": "2011-06-06T17:02:46.802271",
                "mandate": "",
                "date_update_future": "",
                "update_frequency": "monthly",
                "categories": "Government"
            }
        },
    ]
    CreateTestData.create_arbitrary(self.pkgs, extra_user_names=[username])
    user = model.User.by_name(unicode(username))
    assert user
    self.testclient = WsgiCkanClient(self.app, api_key=user.apikey)
def test_new_package(self):
    """A newly created package gets a correct search-index modification time."""
    CreateTestData.create_arbitrary({'name': 'testpkg'})
    created = model.Package.get('testpkg')
    assert created
    self._assert_search_index_has_correct_modification_time(created)
def create(self, **kwargs):
    """Create the fixture packages plus the test user, forwarding kwargs."""
    CreateTestData.create_arbitrary(self.pkgs,
                                    extra_user_names=[self.user_name],
                                    **kwargs)
def setup_class(cls):
    """Build four packages and a hand-crafted four-week revision history
    (January 2011) so the stats plugin's date-bucketed queries can be
    exercised deterministically."""
    super(TestStatsPlugin, cls).setup_class()
    model.repo.rebuild_db()
    CreateTestData.create_arbitrary(
        [
            {'name': 'test1', 'groups': ['grp1'], 'tags': ['tag1']},
            {'name': 'test2', 'groups': ['grp1', 'grp2'], 'tags': ['tag1']},
            {'name': 'test3', 'groups': ['grp1', 'grp2'],
             'tags': ['tag1', 'tag2'], 'private': True},
            {'name': 'test4'},
        ],
        extra_user_names=['bob'],
        admins=['bob'],
    )
    # hack revision timestamps to be this date
    week1 = datetime.datetime(2011, 1, 5)
    for rev in model.Session.query(model.Revision):
        rev.timestamp = week1 + datetime.timedelta(seconds=1)
    # week 2: bob deletes test2
    rev = model.repo.new_revision()
    rev.author = 'bob'
    rev.timestamp = datetime.datetime(2011, 1, 12)
    model.Package.by_name(u'test2').delete()
    model.repo.commit_and_remove()
    # week 3: sandra edits test3 and test4 in two separate revisions
    rev = model.repo.new_revision()
    rev.author = 'sandra'
    rev.timestamp = datetime.datetime(2011, 1, 19)
    model.Package.by_name(u'test3').title = 'Test 3'
    model.repo.commit_and_remove()
    rev = model.repo.new_revision()
    rev.author = 'sandra'
    rev.timestamp = datetime.datetime(2011, 1, 20)
    model.Package.by_name(u'test4').title = 'Test 4'
    model.repo.commit_and_remove()
    # week 4: bob edits test3's notes
    rev = model.repo.new_revision()
    rev.author = 'bob'
    rev.timestamp = datetime.datetime(2011, 1, 26)
    model.Package.by_name(u'test3').notes = 'Test 3 notes'
    model.repo.commit_and_remove()
def test_1_reload(self):
    """Reloading a package whose 'department' extra changed to a synonym
    updates the existing package in place; a genuinely different
    department creates a new package under the suffixed name
    'pollution_'.
    """
    # create initial package
    num_pkgs = count_pkgs()
    pkg_dict = {
        'name': u'pollution',
        'title': u'Pollution',
        'extras': {
            u'department': 'air',
            u'country': 'UK',  # invariant
            u'last_updated': 'Monday',  # variant
        },
        'resources': [{
            'url': 'pollution.com/1',
            'description': 'ons/id/1'
        }],
    }
    bogus_dict = {
        'name': u'bogus',
        'title': u'Pollution',
        'extras': {
            u'department': 'water',
            u'country': 'UK',
            u'last_updated': 'Monday',
        },
        'resources': [{
            'url': 'pollution.com/2',
            'description': 'ons/id/2'
        }],
    }
    assert not model.Package.by_name(pkg_dict['name'])
    assert not model.Package.by_name(bogus_dict['name'])
    CreateTestData.create_arbitrary([pkg_dict, bogus_dict])
    self.tsi.index()
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 1, pkg.resources

    # load the similar package: same title, updated resource,
    # BUT synonym department
    pkg_dict = {
        'name': u'pollution',
        'title': u'Pollution',
        'extras': {
            u'department': 'sky',
            u'country': 'UK',  # invariant
            u'last_updated': 'Tuesday',  # variant
        },
        'resources': [{
            'url': 'pollution.com/id/1',
            'description': 'ons/id/1'
        }],
    }
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert pkg.extras['country'] == pkg_dict['extras']['country']
    assert pkg.extras['last_updated'] == pkg_dict['extras']['last_updated']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)
    assert len(pkg.resources) == 1, pkg.resources
    assert pkg.resources[0].url == pkg_dict['resources'][0]['url'], \
        pkg.resources[0].url
    # BUG FIX: the failure message previously read
    # pkg.resources[0]['description']; Resource is a model object, not a
    # dict, so a failing assert raised TypeError instead of reporting the
    # mismatch.
    assert pkg.resources[0].description == pkg_dict['resources'][0]['description'], \
        pkg.resources[0].description

    # load the different package: because of different department
    pkg_dict3 = {
        'name': u'pollution',
        'title': u'Pollution',
        'extras': {
            u'department': 'river',
            u'country': 'UK',  # invariant
            u'last_updated': 'Tuesday',  # variant
        },
        'resources': [{
            'url': 'pollution.com/id/3',
            'description': 'Lots of pollution | ons/id/3'
        }],
    }
    self.loader.load_package(pkg_dict3)
    CreateTestData.flag_for_deletion('pollution_')
    assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs)
    # (removed an unused pkg_names query that served no assertion)
    pkg = model.Package.by_name(u'pollution_')
    assert pkg
    assert pkg.extras['department'] == pkg_dict3['extras']['department']
def setup_class(self):
    """Create no packages -- only the extra user u'friend' -- and build
    an Authorizer instance for the tests to query."""
    no_packages = []
    CreateTestData.create_arbitrary(no_packages, extra_user_names=[u'friend'])
    self.authorizer = ckan.authz.Authorizer()
def test_5_sync_update(self):
    """Syncing an edit form onto an existing package updates core fields,
    tags and resources, and writes all the government extras back with
    dates normalised to ISO form and geographic coverage encoded as a
    bit-string."""
    # create initial package
    init_data = [{
        'name': 'test_sync',
        'title': 'test_title',
        'extras': {
            'external_reference': 'ref123',
            'date_released': '2008-11-28',
            'date_updated': '2008-04-01',
            'date_update_future': '1/7/2009',
            'geographic_granularity': 'testgran',
            'geographic_coverage': '111000: England, Scotland, Wales',
            'temporal_granularity': 'testtempgran',
            'temporal_coverage-from': '2007-01-08',
            'temporal_coverage-to': '2007-01-09',
            'national_statistic': 'yes',
            'precision': 'testprec',
            'taxonomy_url': 'testtaxurl',
            'published_by': 'Ealing PCT [2]',
            'published_via': 'Department for Education [3]',
        },
    }]
    CreateTestData.create_arbitrary(init_data)
    pkg = model.Package.by_name(u'test_sync')
    assert pkg

    # edit it with form parameters
    indict = _get_blank_param_dict(pkg=pkg, fs=get_fieldset())
    prefix = 'Package-%s-' % pkg.id
    indict[prefix + 'name'] = u'testname2'
    indict[prefix + 'notes'] = u'some new notes'
    # BUG FIX: a stray trailing comma previously made this value the
    # 1-tuple (u'russian, tolstoy',) rather than the intended unicode
    # string the tags field expects.
    indict[prefix + 'tags'] = u'russian, tolstoy'
    indict[prefix + 'license_id'] = u'gpl-3.0'
    indict[prefix + 'date_released'] = u'27/11/2008'
    indict[prefix + 'date_updated'] = u'1/4/2008'
    indict[prefix + 'date_update_future'] = u'1/8/2010'
    indict[prefix + 'geographic_granularity'] = u'regional'
    indict[prefix + 'geographic_coverage-england'] = u'True'
    indict[prefix + 'geographic_coverage-wales'] = u'True'
    indict[prefix + 'temporal_granularity'] = u'year'
    indict[prefix + 'temporal_coverage-from'] = u'6/2008'
    indict[prefix + 'temporal_coverage-to'] = u'6/2009'
    indict[prefix + 'national_statistic'] = u'True'
    indict[prefix + 'precision'] = u'Nearest 1000'
    indict[prefix + 'taxonomy_url'] = u'http:/somewhere/about.html'
    indict[prefix + 'published_by'] = u'Department of Energy and Climate Change [4]'
    indict[prefix + 'published_via'] = u'National Health Service [1]'
    indict[prefix + 'resources-0-url'] = u'http:/1'
    indict[prefix + 'resources-0-format'] = u'xml'
    indict[prefix + 'resources-0-description'] = u'test desc'
    fs = get_fieldset().bind(pkg, data=indict)
    CreateTestData.flag_for_deletion(u'testname2')
    model.repo.new_revision()
    fs.sync()
    model.repo.commit_and_remove()

    outpkg = model.Package.by_name(u'testname2')
    assert outpkg.notes == indict[prefix + 'notes']
    # test tags
    taglist = [tag.name for tag in outpkg.tags]
    assert u'russian' in taglist, taglist
    assert u'tolstoy' in taglist, taglist
    # test licenses
    assert outpkg.license
    assert indict[prefix + 'license_id'] == outpkg.license.id, outpkg.license.id
    # test resources
    assert len(outpkg.resources) == 1, outpkg.resources
    res = outpkg.resources[0]
    assert res.url == u'http:/1', res.url
    assert res.description == u'test desc', res.description
    assert res.format == u'xml', res.format
    # test gov fields: every extra must be present with its normalised value
    extra_keys = outpkg.extras.keys()
    reqd_extras = {
        'date_released': '2008-11-27',
        'date_updated': '2008-04-01',
        'date_update_future': '2010-08-01',
        'geographic_granularity': indict[prefix + 'geographic_granularity'],
        'geographic_coverage': '101000: England, Wales',
        'temporal_granularity': indict[prefix + 'temporal_granularity'],
        'temporal_coverage-from': '2008-06',
        'temporal_coverage-to': '2009-06',
        'national_statistic': 'yes',
        'precision': indict[prefix + 'precision'],
        'taxonomy_url': indict[prefix + 'taxonomy_url'],
        'published_by': indict[prefix + 'published_by'],
        'published_via': indict[prefix + 'published_via'],
    }
    for reqd_extra_key, reqd_extra_value in reqd_extras.items():
        assert reqd_extra_key in extra_keys, \
            'Key "%s" not found in extras %r' % (reqd_extra_key, extra_keys)
        assert outpkg.extras[reqd_extra_key] == reqd_extra_value, \
            'Extra %s should equal %s but equals %s' % \
            (reqd_extra_key, reqd_extra_value,
             outpkg.extras[reqd_extra_key])
def test_0_reload(self): # create initial package num_pkgs = count_pkgs() pkg_dict = { 'name': u'pollution', 'title': u'Pollution', 'extras': { u'department': 'air', u'country': 'UK', #invariant u'last_updated': 'Monday', #variant }, 'resources': [{ 'url': 'pollution.com/1', 'description': 'ons/id/1' }], } bogus_dict = { 'name': u'bogus', 'title': u'Pollution', 'extras': { u'department': 'water', u'country': 'UK', u'last_updated': 'Monday', }, 'resources': [{ 'url': 'pollution.com/2', 'description': 'ons/id/2' }], } assert not model.Package.by_name(pkg_dict['name']) assert not model.Package.by_name(bogus_dict['name']) CreateTestData.create_arbitrary([pkg_dict, bogus_dict]) self.tsi.index() pkg = model.Package.by_name(pkg_dict['name']) assert pkg assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs) assert len(pkg.resources) == 1, pkg.resources # load the same package: same title, department, updated resource pkg_dict = { 'name': u'pollution', 'title': u'Pollution', 'extras': { u'department': 'air', u'country': 'UK', #invariant u'last_updated': 'Tuesday', #variant }, 'resources': [{ 'url': 'pollution.com/id/1', 'description': 'ons/id/1' }], } self.loader.load_package(pkg_dict) pkg = model.Package.by_name(pkg_dict['name']) assert pkg assert pkg.name == pkg_dict['name'] assert pkg.title == pkg_dict['title'] assert pkg.extras['country'] == pkg_dict['extras']['country'] assert pkg.extras['last_updated'] == pkg_dict['extras']['last_updated'] assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs) assert len(pkg.resources) == 1, pkg.resources assert pkg.resources[0].url == pkg_dict['resources'][0][ 'url'], pkg.resources[0].url assert pkg.resources[0].description == pkg_dict['resources'][0][ 'description'], pkg.resources[0]['description'] # load the same package: same title, department, new resource pkg_dict2 = { 'name': u'pollution', 'title': u'Pollution', 'extras': { u'department': 'air', u'country': 'UK', #invariant u'last_updated': 'Tuesday', #variant 
}, 'resources': [{ 'url': 'pollution.com/id/3', 'description': 'ons/id/3' }], } self.loader.load_package(pkg_dict2) pkg = model.Package.by_name(pkg_dict2['name']) assert pkg assert pkg.name == pkg_dict2['name'] assert pkg.title == pkg_dict2['title'] assert pkg.extras['country'] == pkg_dict2['extras']['country'] assert pkg.extras['last_updated'] == pkg_dict2['extras'][ 'last_updated'] assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs) assert len(pkg.resources) == 2, pkg.resources print pkg.resources assert_equal(pkg.resources[0].url, pkg_dict['resources'][0]['url']) assert pkg.resources[0].description == pkg_dict['resources'][0][ 'description'], pkg.resources[0]['description'] assert pkg.resources[1].url == pkg_dict2['resources'][0][ 'url'], pkg.resources[1].url assert pkg.resources[1].description == pkg_dict2['resources'][0][ 'description'], pkg.resources[1]['description'] # load the different package: because of different department pkg_dict3 = { 'name': u'pollution', 'title': u'Pollution', 'extras': { u'department': 'river', u'country': 'UK', #invariant u'last_updated': 'Tuesday', #variant }, 'resources': [{ 'url': 'pollution.com/id/3', 'description': 'Lots of pollution | ons/id/3' }], } self.loader.load_package(pkg_dict3) CreateTestData.flag_for_deletion('pollution_') assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs) pkg_names = [ pkg.name for pkg in model.Session.query(model.Package).all() ] pkg = model.Package.by_name(u'pollution_') assert pkg assert pkg.extras['department'] == pkg_dict3['extras']['department'] # load the same package: but with different country # should just get a warning pkg_dict4 = { 'name': u'pollution', 'title': u'Pollution', 'extras': OrderedDict([ (u'department', 'air'), (u'country', 'UK and France'), #invariant (u'last_updated', 'Tuesday'), #variant ]), 'resources': [ OrderedDict([ ('url', 'pollution.com/id/3'), ('description', 'Lots of pollution | ons/id/3'), ]) ], } self.loader.load_package(pkg_dict4) pkg = 
model.Package.by_name(pkg_dict4['name']) assert pkg assert pkg.name == pkg_dict4['name'] assert pkg.title == pkg_dict4['title'] assert pkg.extras['country'] == pkg_dict4['extras']['country'] assert pkg.extras['last_updated'] == pkg_dict4['extras'][ 'last_updated'] assert count_pkgs() == num_pkgs + 3, (count_pkgs() - num_pkgs) assert len(pkg.resources) == 2, pkg.resources assert pkg.resources[0].url == pkg_dict['resources'][0][ 'url'], pkg.resources[0].url assert pkg.resources[0].description == pkg_dict['resources'][0][ 'description'], pkg.resources[0]['description'] assert pkg.resources[1].url == pkg_dict4['resources'][0][ 'url'], pkg.resources[1].url assert pkg.resources[1].description == pkg_dict4['resources'][0][ 'description'], pkg.resources[1]['description']
def setup(self):
    """Create one realistic DGU package fixture ('lichfield-councillors')
    plus a test user, and record the Authorization header for API calls.
    """
    self.user_name = u'tester1'
    # Full package dict mirroring a COSPREAD-imported record.
    # NOTE(review): extras carry both dash and underscore spellings of
    # the temporal_coverage keys ('temporal_coverage-from' vs
    # 'temporal_coverage_from') and both 'geographical_granularity' and
    # 'geographic_granularity' -- presumably deliberate to exercise both
    # spellings; confirm against the schema under test.
    self.pkg_dict = {
        "name": u"lichfield-councillors",
        "title": "Councillors",
        "version": None,
        "url": "http://www.lichfielddc.gov.uk/data",
        "author": "Democratic and Legal",
        "author_email": None,
        "maintainer": "Web Team",
        "maintainer_email": "*****@*****.**",
        "notes": "A list of Lichfield District Councillors, together with contact details, political party and committees",
        "license_id": "localauth-withrights",
        "license": "OKD Compliant::Local Authority Copyright with data.gov.uk rights",
        "tags": [
            "committees", "cool", "councillors", "democracy", "lichfield",
            "meetings"
        ],
        "groups": ["ukgov"],
        "extras": {
            "temporal_coverage-from": "",
            "date_updated": "2010-03-29",
            "temporal_coverage_to": "",
            "import_source": "COSPREAD-cospread-2010-03-31mk2.csv",
            "geographical_granularity": "local authority",
            "temporal_granularity": "",
            "agency": "",
            "geographic_granularity": "",
            "temporal_coverage-to": "",
            "published_by": "Scotland Office",
            "precision": "",
            "temporal_coverage_from": "",
            "taxonomy_url": "",
            "mandate": "",
            "categories": "",
            "geographic_coverage": "010000: Scotland",
            "external_reference": "",
            "national_statistic": "no",
            "date_update_future": "",
            "update_frequency": "Daily",
            "date_released": "2009-08-01"
        },
        "resources": [{
            "url": "http://www.lichfielddc.gov.uk/site/custom_scripts/councillors_xml.php?viewBy=name",
            "format": "Other XML",
            "description": "",
            "hash": ""
        }]
    }
    CreateTestData.create_arbitrary([self.pkg_dict],
                                    extra_user_names=[self.user_name])
    self.package_name = self.pkg_dict['name']
    test_user = self.get_user_by_name(unicode(self.user_name))
    # API requests authenticate with the user's API key
    self.extra_environ = {'Authorization': str(test_user.apikey)}
def setup(self):
    """Create service/dataset fixtures and a partially-filled coupled
    resource table.

    Services declare couplings through the 'coupled-resource' extra
    (a JSON list of href dicts); harvest objects give datasets B-E
    their refs.  The pre-created coupling rows cover: complete couples
    (A-Bref-B, A-Cref-C), a dataset-only half (None-Dref-D), a
    service-only half (A-Eref-None) and a second service on an
    existing ref (F-Dref-D) -- five rows in total, as asserted at the
    end.
    """
    # Create fixtures
    CreateTestData.create_arbitrary([
        {
            'name': 'serviceA',
            'extras': {
                'coupled-resource': json.dumps([{
                    'href': [ref_prefix + 'Bref']
                }, {
                    'href': [ref_prefix + 'Href']
                }, {
                    'href': [ref_prefix + 'Eref']
                }]),
                'resource-type': 'service'
            }
        },
        {
            'name': 'serviceF',
            'extras': {
                'coupled-resource': json.dumps([{
                    'href': [ref_prefix + 'Dref']
                }]),
                'resource-type': 'service'
            }
        },
        {
            'name': 'serviceG',
            'extras': {
                'coupled-resource': json.dumps([{
                    'href': [ref_prefix + 'Gref']
                }]),
                'resource-type': 'service'
            }
        },
        {'name': 'datasetB', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetC', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetD', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetE', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetG', 'extras': {'resource-type': 'dataset'}},
        {'name': 'datasetH', 'extras': {'resource-type': 'dataset'}},
        {
            'name': 'serviceD',
            'extras': {
                'coupled-resource': json.dumps([{
                    'href': [ref_prefix + 'Dref']
                }]),
                'resource-type': 'service'
            }
        },
    ])
    self._create_user()
    self._create_publisher()
    self.source, self.job = self._create_source_and_job()
    # harvest objects supply the GEMINI refs for datasets B-E
    self._create_harvest_object('datasetB', ref='Bref')
    self._create_harvest_object('datasetC', ref='Cref')
    self._create_harvest_object('datasetD', ref='Dref')
    self._create_harvest_object('datasetE', ref='Eref')
    # Create a partially-filled coupling table
    self._create_coupled_resource('serviceA', 'Bref', 'datasetB')
    self._create_coupled_resource('serviceA', 'Cref', 'datasetC')
    self._create_coupled_resource(None, 'Dref', 'datasetD')
    self._create_coupled_resource('serviceA', 'Eref', None)
    self._create_coupled_resource('serviceF', 'Dref', 'datasetD')
    model.Session.commit()
    model.Session.remove()
    # snapshot the starting state so tests can diff against it
    self.couples_before = self._get_coupled_resources()
    pprint(self.couples_before)
    assert_equal(len(self.couples_before), 5)
def test_0_reload(self):
    """Loader matching by the 'ref' extra: same ref updates the existing
    package (rename not supported); a new ref under a taken name forces
    a suffixed new package."""
    # create initial package
    num_pkgs = count_pkgs()
    pkg_dict = {
        'name': u'pkgname0',
        'title': u'Boris',
        'extras': {u'ref': 'boris'}
    }
    assert not model.Package.by_name(pkg_dict['name'])
    CreateTestData.create_arbitrary([pkg_dict])
    self.tsi.index()
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # load the package with same name and ref -> updates in place
    pkg_dict = {
        'name': u'pkgname0',
        'title': u'Boris 2',
        'extras': {u'ref': 'boris'}
    }
    self.loader.load_package(pkg_dict)
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # load the package with different name, same ref
    pkg_dict = {
        'name': u'pkgname0changed',
        'title': u'Boris 3',
        'extras': {u'ref': 'boris'}
    }
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    assert count_pkgs() == num_pkgs + 1, (count_pkgs() - num_pkgs)
    # for now we do not support renaming
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg is None, pkg
    pkg = model.Package.by_name(u'pkgname0')
    assert pkg
    assert pkg.title == pkg_dict['title']
    # load the package with same name, different ref - new package
    other_pkg_dict = pkg_dict
    pkg_dict = {
        'name': u'pkgname0',
        'title': u'Boris 4',
        'extras': {u'ref': 'boris-4'}
    }
    CreateTestData.flag_for_deletion(pkg_names=[pkg_dict['name']])
    self.loader.load_package(pkg_dict)
    # NOTE(review): this checks pkg_dict['name'] AFTER load_package --
    # the loader apparently renames the dict in place to the free name
    # 'pkgname0_'; confirm load_package mutates its argument.
    assert pkg_dict['name'] == 'pkgname0_'
    orig_pkg = model.Package.by_name(u'pkgname0')
    assert orig_pkg
    assert orig_pkg.title == u'Boris 3'
    pkg = model.Package.by_name(pkg_dict['name'])
    assert pkg
    assert pkg.name == pkg_dict['name']
    assert pkg.title == pkg_dict['title']
    assert count_pkgs() == num_pkgs + 2, (count_pkgs() - num_pkgs)