def load_from_databank(sourcejson, dataproviderjson, dry_run=False, overwrite=True, meta_only=False, file_dir=None):
    # Sync one databank indicator into the local DB: upsert the DataOrg for
    # the provider, upsert the Dataset for the indicator, then drop any
    # previously attached source so it can be reloaded.
    # NOTE(review): dry_run / overwrite / meta_only / file_dir are unused in
    # this visible portion -- presumably consumed further down; confirm.
    print "Working on ", sourcejson['fields']['indicator']
    dataorg = DataOrg.by_name(dataproviderjson['fields']['title'])
    dataorgMeta = {
        'description': dataproviderjson['fields']['description'],
        'label': dataproviderjson['fields']['title']
    }
    if not dataorg:
        dataorg = DataOrg(dataorgMeta)
        db.session.add(dataorg)
    #dataorg will update with id here
    db.session.commit()

    #get or create dataset
    dataset = Dataset.by_label(sourcejson['fields']['indicator'])
    # Description links back to the databank admin page for this connection.
    description = "http://databank.edip-maps.net/admin/etldata/dataconnection/" + str(sourcejson['pk']) + "/"
    modelDataset = {'dataset':
                    {
                        'label': sourcejson['fields']['indicator'],
                        'name': sourcejson['fields']['indicator'],
                        'description': description,
                        'dataType': sourcejson['fields']['data_type'],
                        'dataorg_id': dataorg.id
                    }
                    }
    if not dataset:
        #create one
        dataset = Dataset(modelDataset['dataset'])
        #dataset.ORoperations = dataproviderjson['fields'].get('ORoperations', {})
        #dataset.data = dataproviderjson['fields'].get('mapping',{})
        db.session.add(dataset)
    else:
        #dataset.ORoperations = dataproviderjson['fields'].get('ORoperations', {})
        #dataset.data = dataproviderjson['fields'].get('mapping',{})
        dataset.update(modelDataset['dataset'])
    db.session.commit()

    systemaccount = Account.by_id(1)

    if dataset.source:
        # Best-effort removal of any stale source; failure is logged only.
        try:
            print "trying to delete source"
            print dataset.source
            dataset.source.delete()
        except Exception, e:
            print "could not delete source", e
def test_browser_for_entity(self):
    """End-to-end check of the entity browser page: create a dataset and
    an entity, attach one entry linking the entity to itself, reindex
    Solr, then assert the entity view renders with the entry count and
    JSON/CSV export links."""
    h.skip_if_stubbed_solr()

    from openspending.model import Dataset, Entry
    dataset = Dataset(name='testdataset')
    Dataset.c.save(dataset, manipulate=True)
    dataset_ref_dict = dataset.to_ref_dict()
    entity = self._make_one(name="Test Entity", label="Test Entity Label")
    entity_ref_dict = entity.to_ref_dict()
    # One entry where the entity is both payer ('from') and payee ('to').
    entry = {'name': 'Test Entry',
             'label': 'Test Entry Label',
             'from': entity_ref_dict,
             'to': entity_ref_dict,
             'amount': 10.0,
             'dataset': dataset_ref_dict}
    Entry.c.save(entry)
    h.clean_and_reindex_solr()
    entity_url = url(controller='entity', id=str(entity['_id']),
                     slug='test-entity-label', action='view')
    response = self.app.get(entity_url)
    h.assert_equal(response._status, '200 OK')
    h.assert_true('<b>1 entries</b> found.<br />' in response)
    h.assert_true('entries.json">' in response)
    h.assert_true('entries.csv">' in response)
def create(self): require.dataset.create() try: dataset = dict(request.params) dataset['territories'] = request.params.getall('territories') dataset['languages'] = request.params.getall('languages') model = {'dataset': dataset} schema = dataset_schema(ValidationState(model)) data = schema.deserialize(dataset) if Dataset.by_name(data['name']) is not None: raise Invalid( SchemaNode(String(), name='dataset.name'), _("A dataset with this identifer already exists!")) dataset = Dataset({'dataset': data}) dataset.private = True dataset.managers.append(c.account) db.session.add(dataset) db.session.commit() redirect( h.url_for(controller='editor', action='index', dataset=dataset.name)) except Invalid, i: errors = i.asdict() return self.new(errors)
def create(self):
    """ Adds a new dataset dynamically through a POST request.

    Expects exactly two params: 'metadata' (URL of a JSON model) and
    'csv_file' (URL of the data). Creates or reuses the dataset and its
    source, then queues loading via celery. Aborts with 400 on any
    missing/invalid input, 403 via require.* on authz failure.
    """
    # User must be authenticated so we should have a user object in
    # c.account, if not abort with error message
    if not c.account:
        abort(status_code=400, detail='user not authenticated')

    # Check if the params are there ('metadata', 'csv_file')
    if len(request.params) != 2:
        abort(status_code=400, detail='incorrect number of params')

    metadata = request.params['metadata'] \
        if 'metadata' in request.params \
        else abort(status_code=400, detail='metadata is missing')

    csv_file = request.params['csv_file'] \
        if 'csv_file' in request.params \
        else abort(status_code=400, detail='csv_file is missing')

    # We proceed with the dataset
    try:
        model = json.load(urllib2.urlopen(metadata))
    except Exception:
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
        # are no longer swallowed here.
        abort(status_code=400, detail='JSON model could not be parsed')

    try:
        log.info("Validating model")
        model = validate_model(model)
    except Invalid as i:
        log.error("Errors occured during model validation:")
        for field, error in i.asdict().items():
            log.error("%s: %s", field, error)
        abort(status_code=400, detail='Model is not well formed')

    dataset = Dataset.by_name(model['dataset']['name'])
    if dataset is None:
        dataset = Dataset(model)
        require.dataset.create()
        dataset.managers.append(c.account)
        dataset.private = True  # Default value
        db.session.add(dataset)
    else:
        require.dataset.update(dataset)

    log.info("Dataset: %s", dataset.name)
    source = Source(dataset=dataset, creator=c.account, url=csv_file)
    log.info(source)
    # Reuse an existing source with the same URL instead of duplicating it.
    for source_ in dataset.sources:
        if source_.url == csv_file:
            source = source_
            break
    db.session.add(source)
    db.session.commit()

    # Send loading of source into celery queue
    load_source.delay(source.id)
    return to_jsonp(dataset_apply_links(dataset.as_dict()))
def test_views_update(self):
    """Posting edited views JSON as an authorized user persists the change."""
    views = Dataset.by_name('cra').data['views']
    views[0]['label'] = 'Banana'
    endpoint = url(controller='editor', action='views_update',
                   dataset='cra')
    response = self.app.post(endpoint,
                             params={'views': json.dumps(views)},
                             extra_environ={'REMOTE_USER': '******'},
                             expect_errors=True)
    assert '200' in response.status, response.status
    reloaded = Dataset.by_name('cra')
    assert 'Banana' in repr(reloaded.data['views'])
def test_retract(self):
    """Retracting a public dataset makes it private; a second retract is 400."""
    cra = Dataset.by_name('cra')
    assert cra.private is False, cra.private
    retract_url = url(controller='editor', action='retract', dataset='cra')
    self.app.post(retract_url, extra_environ={'REMOTE_USER': '******'})
    cra = Dataset.by_name('cra')
    assert cra.private is True, cra.private
    # Retracting an already-private dataset must fail.
    second = self.app.post(retract_url,
                           extra_environ={'REMOTE_USER': '******'},
                           expect_errors=True)
    assert '400' in second.status, second.status
class TestCompoundDimension(DatabaseTestCase):
    """Tests for the compound dimensions ('to' entity, 'function'
    classifier) declared in SIMPLE_MODEL: table generation, attribute
    access and member loading."""

    def setup(self):
        # Bind the shared metadata to the test engine and build a dataset
        # from the fixture model.
        super(TestCompoundDimension, self).setup()
        self.engine = db.engine
        self.meta = db.metadata
        self.meta.bind = self.engine
        self.ds = Dataset(SIMPLE_MODEL)
        self.entity = self.ds['to']
        self.classifier = self.ds['function']

    def test_is_compound(self):
        # Compound dimensions carry their own attribute table.
        h.assert_true(self.entity.is_compound)

    def test_basic_properties(self):
        assert self.entity.name=='to', self.entity.name
        assert self.classifier.name=='function', self.classifier.name

    def test_generated_tables(self):
        #assert not hasattr(self.entity, 'table'), self.entity
        #self.ds.generate()
        assert hasattr(self.entity, 'table'), self.entity
        # Dimension tables are namespaced as <dataset>__<taxonomy>.
        assert self.entity.table.name=='test__' + self.entity.taxonomy, self.entity.table.name
        assert hasattr(self.entity, 'alias')
        assert self.entity.alias.name==self.entity.name, self.entity.alias.name
        cols = self.entity.table.c
        assert 'id' in cols
        # 'field' is not an attribute of this dimension.
        assert_raises(KeyError, cols.__getitem__, 'field')

    def test_attributes_exist_on_object(self):
        assert len(self.entity.attributes)==2, self.entity.attributes
        assert_raises(KeyError, self.entity.__getitem__, 'field')
        assert self.entity['name'].name=='name'
        assert self.entity['name'].datatype=='id'

    def test_attributes_exist_on_table(self):
        assert hasattr(self.entity, 'table'), self.entity
        assert 'name' in self.entity.table.c, self.entity.table.c
        assert 'label' in self.entity.table.c, self.entity.table.c

    def test_members(self):
        # Load two members, then check members() with and without a filter.
        self.ds.generate()
        self.entity.load(self.ds.bind, {'name': 'one', 'label': 'Label One'})
        self.entity.load(self.ds.bind, {'name': 'two', 'label': 'Label Two'})
        members = list(self.entity.members())
        h.assert_equal(len(members), 2)
        members = list(self.entity.members(self.entity.alias.c.name == 'one'))
        h.assert_equal(len(members), 1)
def test_publish(self):
    """Publishing flips a private dataset to public; publishing twice is 400."""
    cra = Dataset.by_name('cra')
    cra.private = True
    db.session.commit()
    publish_url = url(controller='editor', action='publish', dataset='cra')
    self.app.post(publish_url, extra_environ={'REMOTE_USER': '******'})
    cra = Dataset.by_name('cra')
    assert cra.private is False, cra.private
    # Publishing an already-public dataset must fail.
    second = self.app.post(publish_url,
                           extra_environ={'REMOTE_USER': '******'},
                           expect_errors=True)
    assert '400' in second.status, second.status
def csvimport_fixture(name):
    """Build a generated Dataset plus a Source for the named CSV fixture."""
    model = json.load(csvimport_fixture_file(name, 'model.json'))
    mapping_fp = csvimport_fixture_file(name, 'mapping.json')
    if mapping_fp:
        # A separate mapping file overrides/extends the model's mapping.
        model['mapping'] = json.load(mapping_fp)
    dataset = Dataset(model)
    dataset.generate()
    db.session.add(dataset)
    account = make_account()
    data_path = csvimport_fixture_path(name, 'data.csv')
    source = Source(dataset, account, data_path)
    db.session.add(source)
    db.session.commit()
    return source
def csvimport_fixture(name):
    """Create a generated Dataset plus Source for the named CSV fixture."""
    model = json.load(csvimport_fixture_file(name, 'model.json'))
    mapping_fp = csvimport_fixture_file(name, 'mapping.json')
    if mapping_fp:
        # Optional standalone mapping file.
        model['mapping'] = json.load(mapping_fp)
    dataset = Dataset(model)
    dataset.generate()
    db.session.add(dataset)
    owner = h.make_account()
    csv_path = csvimport_fixture_path(name, 'data.csv')
    source = Source(dataset, owner, csv_path)
    db.session.add(source)
    db.session.commit()
    return source
def archive_one(dataset_name, archive_dir):
    """Locate *dataset_name*, prepare *archive_dir*, then archive the
    model, visualisations and all sources into it."""
    dataset = Dataset.by_name(dataset_name)
    if dataset is None:
        exit_with_error("Dataset not found. Unable to archive it.")

    if os.path.exists(archive_dir):
        # Existing target: ask before clobbering; refuse plain files.
        prompt = "%s exists. Do you want to overwrite?" % archive_dir
        if not get_confirmation(prompt):
            sys.exit(0)
        if os.path.isfile(archive_dir):
            exit_with_error("Cannot overwrite a file (need a directory).")
    else:
        try:
            os.makedirs(archive_dir)
        except OSError:
            exit_with_error("Couldn't create archive directory.")

    archive_model(dataset, archive_dir)
    archive_visualisations(dataset, archive_dir)
    update(os.path.join(archive_dir, 'sources'), dataset)
def test_drop(self):
    """Dropping requires authz (403 for anonymous) and empties the dataset."""
    cra = Dataset.by_name('cra')
    assert len(cra) == 36, len(cra)
    drop_url = url(controller='editor', action='drop', dataset='cra')

    # Anonymous users may not drop; data stays intact.
    response = self.app.post(drop_url, expect_errors=True)
    assert '403' in response.status
    cra = Dataset.by_name('cra')
    assert len(cra) == 36, len(cra)

    # An authorized drop empties the dataset.
    self.app.post(drop_url, extra_environ={'REMOTE_USER': '******'})
    cra = Dataset.by_name('cra')
    assert len(cra) == 0, len(cra)
def create_view(self, cls, add_filters, name, label, dimension,
                breakdown=None, view_filters=None):
    '''\
    Create a view. The view will be computed when you call
    :meth:`finalize`.

    ``cls``
        A model class (inheriting from :class:`openspending.model.Base`)
    ``add_filters``
        A :term:`mongodb query spec` used as a query to select the
        instances of *cls* that will be used for the view.
    ``name``
        A name for the view. This name must be unique for all views
        for *cls* in an Open Spending site.
    ``label``
        A label that can be displayed to the user.
    ``dimension``
        The dimension that will be used to compute the view
    ``breakdown``
        ...
    ``view_filters``
        ... Defaults to no filters.

    Returns: A :class:`openspending.lib.views.View` object.
    '''
    # Was a mutable default argument ({}): a single shared dict across
    # all calls. Use the None-sentinel idiom instead.
    if view_filters is None:
        view_filters = {}
    log.debug("pre-aggregating view %s on %r where %r",
              name, cls, view_filters)
    view = View(self.dataset, name, label, dimension,
                breakdown, cuts=view_filters)
    view.apply_to(cls, add_filters)
    view.compute()
    Dataset.c.update({'name': self.dataset.name},
                     {'$set': {'cubes': self.dataset.get('cubes', {})}})
    # Refresh the cached dataset document after the cube update.
    self.dataset = Dataset.by_id(self.dataset.name)
    return view
def setup(self):
    """Bind the shared metadata to the test engine and build the fixture
    dataset with its compound dimensions."""
    self.engine = db.engine
    self.meta = db.metadata
    self.meta.bind = self.engine
    fixture = Dataset(SIMPLE_MODEL)
    self.ds = fixture
    self.entity = fixture['to']
    self.classifier = fixture['function']
def setup(self):
    """Bind shared metadata to the engine and load the 'simple' model."""
    super(TestAttributeDimension, self).setup()
    self.engine = db.engine
    self.meta = db.metadata
    self.meta.bind = self.engine
    simple = Dataset(model_fixture('simple'))
    self.ds = simple
    self.field = simple['field']
def setup(self):
    """Bind shared metadata to the engine and load SIMPLE_MODEL."""
    super(TestAttributeDimension, self).setup()
    self.engine = db.engine
    self.meta = db.metadata
    self.meta.bind = self.engine
    simple = Dataset(SIMPLE_MODEL)
    self.ds = simple
    self.field = simple['field']
def test_new_wrong_user(self):
    """A dataset created by one user cannot be updated by another (403)."""
    # First we add a Dataset with user 'test_new'
    creator = Account.by_name('test_new')
    assert creator.api_key == 'd0610659-627b-4403-8b7f-6e2820ebc95d'
    create_url = url(controller='api/version2', action='create')
    params = {
        'metadata':
            'https://dl.dropbox.com/u/3250791/sample-openspending-model.json',
        'csv_file':
            'http://mk.ucant.org/info/data/sample-openspending-dataset.csv'
    }
    headers = {'Authorization': 'apikey {0}'.format(creator.api_key)}
    response = self.app.post(create_url, params, headers)
    assert "200" in response.status
    assert Dataset.by_name('openspending-example')

    # After that we try to update the Dataset with user 'test_new2'
    intruder = Account.by_name('test_new2')
    assert intruder.api_key == 'c011c340-8dad-419c-8138-1c6ded86ead5'
    headers = {'Authorization': 'apikey {0}'.format(intruder.api_key)}
    response = self.app.post(create_url, params, headers,
                             expect_errors=True)
    assert '403' in response.status
def test_new_wrong_user(self):
    """A dataset created by one user may not be updated by another
    user via the create endpoint (expects a 403)."""
    # First we add a Dataset with user 'test_new'
    user = Account.by_name('test_new')
    assert user.api_key == 'd0610659-627b-4403-8b7f-6e2820ebc95d'

    u = url(controller='api/version2', action='create')
    params = {
        'metadata':
            'https://dl.dropbox.com/u/3250791/sample-openspending-model.json',
        'csv_file':
            'http://mk.ucant.org/info/data/sample-openspending-dataset.csv'
    }
    apikey_header = 'apikey {0}'.format(user.api_key)
    response = self.app.post(u, params, {'Authorization': apikey_header})

    #Dataset.by_name('openspending-example').private = False

    assert "200" in response.status
    assert Dataset.by_name('openspending-example')

    # After that we try to update the Dataset with user 'test_new2'
    user = Account.by_name('test_new2')
    assert user.api_key == 'c011c340-8dad-419c-8138-1c6ded86ead5'

    u = url(controller='api/version2', action='create')
    params = {
        'metadata':
            'https://dl.dropbox.com/u/3250791/sample-openspending-model.json',
        'csv_file':
            'http://mk.ucant.org/info/data/sample-openspending-dataset.csv'
    }
    apikey_header = 'apikey {0}'.format(user.api_key)
    # The second user lacks update rights on the existing dataset.
    response = self.app.post(u, params, {'Authorization': apikey_header},
                             expect_errors=True)
    assert '403' in response.status
def permissions(self):
    """ Check a user's permissions for a given dataset. This could also
    be done via request to the user, but since we're not really doing
    a RESTful service we do this via the api instead. """
    # Exactly one parameter is accepted, and it must be 'dataset'.
    if len(request.params) != 1 or 'dataset' not in request.params:
        return to_jsonp({'error': 'Parameter dataset missing'})

    # Look up the dataset whose permissions are being queried.
    dataset = Dataset.by_name(request.params['dataset'])
    exists = dataset is not None

    perms = {
        "create": can.dataset.create() and not exists,
        "read": can.dataset.read(dataset) if exists else False,
        "update": can.dataset.update(dataset) if exists else False,
        "delete": can.dataset.delete(dataset) if exists else False,
    }
    return to_jsonp(perms)
def index(self):
    """Render the home page with the datasets visible to the current
    (possibly anonymous) account."""
    account = c.account  # may be None for anonymous visitors
    c.datasets = Dataset.all_by_account(account)
    c.num_entries = dataset_entries(None)
    return templating.render('home/index.html')
def remove_dataset(dataset_name):
    """Drop the dataset's generated tables and delete its DB record."""
    log.warn("Dropping dataset '%s'", dataset_name)
    from openspending.model import Dataset, meta as db
    target = Dataset.by_name(dataset_name)
    target.drop()
    db.session.delete(target)
    db.session.commit()
def archive_one(dataset_name, archive_dir): """ Find the dataset, create the archive directory and start archiving """ # Find the dataset dataset = Dataset.by_name(dataset_name) # If no dataset found, exit with error message if dataset is None: exit_with_error("Dataset not found. Unable to archive it.") # If the archive_dir exists we have to ask the user if we should overwrite if os.path.exists(archive_dir): # If user doesn't want to write over it we exit if not get_confirmation( "%s exists. Do you want to overwrite?" % archive_dir): sys.exit(0) # If the archive dir is a file we don't do anything if os.path.isfile(archive_dir): exit_with_error("Cannot overwrite a file (need a directory).") # If the archive_dir doesn't exist we create it else: try: os.makedirs(archive_dir) except OSError: # If we couldn't create it, we exit with an error message exit_with_error("Couldn't create archive directory.") # Archive the model (dataset metadata) archive_model(dataset, archive_dir) # Archive the visualisations archive_visualisations(dataset, archive_dir) # Download all sources update(os.path.join(archive_dir, 'sources'), dataset)
def test_index_hide_private(self):
    """Private datasets must not appear in the JSON index listing."""
    cra = Dataset.by_name('cra')
    cra.private = True
    db.session.commit()
    index_url = url(controller='dataset', action='index', format='json')
    listing = json.loads(self.app.get(index_url).body)
    h.assert_equal(len(listing), 0)
def permissions(self):
    """ Check a user's permissions for a given dataset. This could also
    be done via request to the user, but since we're not really doing
    a RESTful service we do this via the api instead. """
    # Single required parameter: 'dataset'.
    if len(request.params) != 1 or 'dataset' not in request.params:
        return to_jsonp({'error': 'Parameter dataset missing'})

    # Get the dataset we want to check permissions for
    dataset = Dataset.by_name(request.params['dataset'])
    missing = dataset is None

    # Return permissions (only 'create' can be true for a missing dataset).
    return to_jsonp({
        "create": can.dataset.create() and missing,
        "read": False if missing else can.dataset.read(dataset),
        "update": False if missing else can.dataset.update(dataset),
        "delete": False if missing else can.dataset.delete(dataset)
    })
def test_index_hide_private(self):
    """The JSON index response must not list private datasets."""
    cra = Dataset.by_name('cra')
    cra.private = True
    db.session.commit()
    response = self.app.get(url(controller='dataset', action='index',
                                format='json'))
    payload = json.loads(response.body)
    h.assert_equal(len(payload['datasets']), 0)
def test_delete(self):
    """Deleting needs authz; an authorized delete removes the dataset."""
    cra = Dataset.by_name('cra')
    assert len(cra) == 36, len(cra)
    delete_url = url(controller='editor', action='delete', dataset='cra')

    # Anonymous: forbidden, data untouched.
    response = self.app.post(delete_url, expect_errors=True)
    assert '403' in response.status
    cra = Dataset.by_name('cra')
    assert len(cra) == 36, len(cra)

    # Authorized: dataset is gone afterwards.
    self.app.post(delete_url, extra_environ={'REMOTE_USER': '******'})
    cra = Dataset.by_name('cra')
    assert cra is None, cra
def index():
    """List all datasets (admin view) as JSON, marked uncached.

    TODO: Facets for territories and languages
    TODO: filters on facet dimensions
    TODO: maybe put the pager back in
    """
    # .all() already materializes the query into a list; the previous
    # element-by-element copy into a second list was redundant.
    datasets = Dataset.get_all_admin().all()
    return jsonify(datasets, headers={'Cache-Control': 'no-cache'})
def index():
    """List all datasets (admin view) as JSON.

    TODO: Facets for territories and languages
    TODO: filters on facet dimensions
    TODO: maybe put the pager back in
    """
    # .all() already returns a list; copying it element-by-element into a
    # second list (as the old code did) added nothing.
    datasets = Dataset.get_all_admin().all()
    return jsonify(datasets)
def test_index_hide_private(self):
    """The JSON dataset index must omit datasets flagged private."""
    cra = Dataset.by_name("cra")
    cra.private = True
    db.session.commit()
    listing_url = url(controller="dataset", action="index", format="json")
    body = json.loads(self.app.get(listing_url).body)
    h.assert_equal(len(body["datasets"]), 0)
def create(): """ This takes a json format post with label, name, description and creates a private dataset to put sources in The json_errors return a json object """ if not require.dataset.create(): return jsonify( {"errors": ["Can not create new dataset. Permission denied"]}) try: dataset = api_form_data() if not dataset.get("dataorg", None): return jsonify( {"errors": ["You must select the data source organization"]}) model = {'data': dataset} schema = dataset_schema(ValidationState(model)) data = schema.deserialize(dataset) #should have a better place for sluggify if (data.get('name', None)): tempname = slugify(str(data.get('name')), max_length=50) else: tempname = slugify(str(data.get('label')), max_length=50) if Dataset.by_name(tempname) is not None: return jsonify( {"errors": ["A dataset with this name already exists "]}) dataset = Dataset(data=data) dataset.managers.append(current_user) db.session.add(dataset) dataset_source = Source.by_source_name(dataset.name) if not dataset_source: dataset_source = Source(dataset=dataset, name=dataset.name) db.session.add(dataset_source) else: dataset_source.dataset = dataset #creating a new dataset so we have to create a source as well db.session.commit() return jsonify({"success": True, "dataset": dataset.name}) except Exception, e: ex_type, ex, tb = sys.exc_info() print traceback.print_tb(tb) return jsonify({"errors": ['Unknown Error has occurred: ' + str(e)]})
def index(self):
    """Home page: datasets visible to the current (possibly anonymous)
    account, plus per-territory dataset counts and total entry count."""
    visible = Dataset.all_by_account(c.account)
    c.datasets = visible
    c.territories = DatasetTerritory.dataset_counts(visible)
    c.num_entries = dataset_entries(None)
    return templating.render('home/index.html')
def setup(self):
    """Prepare engine-bound metadata and the fixture compound dimensions."""
    super(TestCompoundDimension, self).setup()
    self.engine = db.engine
    self.meta = db.metadata
    self.meta.bind = self.engine
    fixture = Dataset(SIMPLE_MODEL)
    self.ds = fixture
    self.entity = fixture['to']
    self.classifier = fixture['function']
def test_view_private(self):
    """A private dataset's view page returns 403 without leaking content."""
    cra = Dataset.by_name('cra')
    cra.private = True
    db.session.commit()
    view_url = url(controller='dataset', action='view', dataset='cra')
    response = self.app.get(view_url, status=403)
    for fragment in ('Country Regional Analysis v2009',
                     '36 spending entries'):
        h.assert_false(fragment in response,
                       "'%s' not in response!" % fragment)
def get_or_create_dataset(model):
    """
    Based on a provided model we get the dataset by name
    (if it doesn't exist we create and persist it).
    """
    name = model['dataset']['name']
    dataset = Dataset.by_name(name)
    if dataset is None:
        # Not present yet: persist a fresh dataset built from the model.
        dataset = Dataset(model)
        db.session.add(dataset)
        db.session.commit()
    # Log information about the dataset and return it
    log.info("Dataset: %s", dataset.name)
    return dataset
def test_templates_update(self):
    """Posting a new serp_title via templates_update persists it."""
    endpoint = url(controller='editor', action='templates_update',
                   dataset='cra')
    response = self.app.post(endpoint,
                             params={'serp_title': 'BANANA'},
                             extra_environ={'REMOTE_USER': '******'},
                             expect_errors=True)
    assert '200' in response.status, response.status
    cra = Dataset.by_name('cra')
    assert cra.serp_title == 'BANANA', cra.serp_title
def test_team_update(self):
    """An empty team_update leaves exactly one manager on the dataset."""
    endpoint = url(controller='editor', action='team_update',
                   dataset='cra')
    response = self.app.post(endpoint, params={},
                             extra_environ={'REMOTE_USER': '******'},
                             expect_errors=True)
    assert '200' in response.status, response.status
    cra = Dataset.by_name('cra')
    assert len(cra.managers.all()) == 1, cra.managers
def test_core_update_invalid_currency(self):
    """An unknown currency is rejected and the stored currency unchanged."""
    payload = {'name': 'cra',
               'label': 'Common Rough Act',
               'description': 'I\'m a banana',
               'currency': 'glass pearls'}
    response = self.app.post(url(controller='editor', action='core_update',
                                 dataset='cra'),
                             params=payload,
                             extra_environ={'REMOTE_USER': '******'})
    assert 'not a valid currency' in response.body
    cra = Dataset.by_name('cra')
    assert cra.currency == 'GBP', cra.label
def test_view_private(self):
    """Viewing a private dataset yields 403 and leaks no dataset content."""
    cra = Dataset.by_name("cra")
    cra.private = True
    db.session.commit()
    view_url = url(controller="dataset", action="view", dataset="cra")
    response = self.app.get(view_url, status=403)
    for snippet in ("Country Regional Analysis v2009",
                    "openspending_browser"):
        h.assert_false(snippet in response,
                       "'%s' not in response!" % snippet)
def test_core_update_invalid_territory(self):
    """An unknown territory code is rejected and not stored."""
    payload = {'name': 'cra',
               'label': 'CRA',
               'territories': 'su',
               'description': 'I\'m a banana',
               'currency': 'GBP'}
    response = self.app.post(url(controller='editor', action='core_update',
                                 dataset='cra'),
                             params=payload,
                             extra_environ={'REMOTE_USER': '******'})
    assert not 'updated' in response.body
    cra = Dataset.by_name('cra')
    assert not 'su' in cra.territories
def test_distinct_regions(self):
    """distinct_regions() returns the de-duplicated union of all regions."""
    other = make_dataset()
    self.dat.add_region("region 1")
    self.dat.add_region("region 2")
    other.add_region("region 1")  # duplicate across datasets
    self.dat.save()
    other.save()
    assert Dataset.distinct_regions() == ["region 1", "region 2"]
def test_core_update_invalid_label(self):
    """An empty label fails validation and the old label is retained."""
    payload = {'name': 'cra',
               'label': '',
               'description': 'I\'m a banana',
               'currency': 'GBP'}
    response = self.app.post(url(controller='editor', action='core_update',
                                 dataset='cra'),
                             params=payload,
                             extra_environ={'REMOTE_USER': '******'})
    assert 'Required' in response.body
    cra = Dataset.by_name('cra')
    assert cra.label != '', cra.label
def test_view_private(self):
    """Private dataset view page: 403 with no content leakage."""
    cra = Dataset.by_name('cra')
    cra.private = True
    db.session.commit()
    response = self.app.get(url(controller='dataset', action='view',
                                dataset='cra'), status=403)
    for snippet in ('Country Regional Analysis v2009',
                    'openspending_browser'):
        h.assert_false(snippet in response,
                       "'%s' not in response!" % snippet)
def test_feeds(self):
    """RSS feed visibility rules: private datasets are hidden from
    anonymous and plain logged-in users but shown to admins; the
    dataset index page links to the feed."""
    # Anonymous user with one public dataset
    response = self.app.get(url(controller='dataset', action='feed_rss'),
                            expect_errors=True)
    assert 'application/xml' in response.content_type
    assert '<title>Recently Created Datasets</title>' in response
    assert '<item><title>Country Regional Analysis v2009' in response
    cra = Dataset.by_name('cra')
    cra.private = True
    db.session.add(cra)
    db.session.commit()

    # Anonymous user with one private dataset
    response = self.app.get(url(controller='dataset', action='feed_rss'),
                            expect_errors=True)
    assert 'application/xml' in response.content_type
    assert '<title>Recently Created Datasets</title>' in response
    assert '<item><title>Country Regional Analysis v2009' not in response

    # Logged in user with one public dataset
    cra.private = False
    db.session.add(cra)
    db.session.commit()
    response = self.app.get(url(controller='dataset', action='feed_rss'),
                            expect_errors=True,
                            extra_environ={'REMOTE_USER': '******'})
    assert 'application/xml' in response.content_type
    assert '<title>Recently Created Datasets</title>' in response
    assert '<item><title>Country Regional Analysis v2009' in response

    # Logged in user with one private dataset
    cra.private = True
    db.session.add(cra)
    db.session.commit()
    response = self.app.get(url(controller='dataset', action='feed_rss'),
                            expect_errors=True,
                            extra_environ={'REMOTE_USER': '******'})
    assert 'application/xml' in response.content_type
    assert '<title>Recently Created Datasets</title>' in response
    assert '<item><title>Country Regional Analysis v2009' not in response

    # Logged in admin user with one private dataset
    admin_user = h.make_account('admin')
    admin_user.admin = True
    db.session.add(admin_user)
    db.session.commit()
    response = self.app.get(url(controller='dataset', action='feed_rss'),
                            extra_environ={'REMOTE_USER': '******'})
    assert '<title>Recently Created Datasets</title>' in response
    assert '<item><title>Country Regional Analysis v2009' in response
    assert 'application/xml' in response.content_type

    # The dataset index page advertises the RSS feed.
    response = self.app.get(url(controller='dataset', action='index'))
    assert ('<link rel="alternate" type="application/rss+xml" title="'
            'Latest Datasets on OpenSpending" href="/datasets.rss"'
            in response)
def test_core_update(self):
    """A valid core_update changes the dataset's label and currency."""
    payload = {'name': 'cra',
               'label': 'Common Rough Act',
               'description': 'I\'m a banana',
               'currency': 'EUR',
               'languages': 'en',
               'territories': 'gb'}
    self.app.post(url(controller='editor', action='core_update',
                      dataset='cra'),
                  params=payload,
                  extra_environ={'REMOTE_USER': '******'})
    cra = Dataset.by_name('cra')
    assert cra.label == 'Common Rough Act', cra.label
    assert cra.currency == 'EUR', cra.currency
def test_core_update_invalid_language(self):
    """An unknown language code is rejected and not stored."""
    payload = {'name': 'cra',
               'label': 'CRA',
               'languages': 'esperanto',
               'description': 'I\'m a banana',
               'currency': 'GBP',
               'default_time': 2009}
    response = self.app.post(url(controller='editor', action='core_update',
                                 dataset='cra'),
                             params=payload,
                             extra_environ={'REMOTE_USER': '******'})
    assert not 'updated' in response.body
    cra = Dataset.by_name('cra')
    assert not 'esperanto' in cra.languages
def test_new_no_apikey(self):
    """Creating a dataset without an API key fails (400) and creates nothing."""
    create_url = url(controller='api/version2', action='create')
    payload = {
        'metadata':
            'https://dl.dropbox.com/u/3250791/sample-openspending-model.json',
        'csv_file':
            'http://mk.ucant.org/info/data/sample-openspending-dataset.csv'
    }
    response = self.app.post(create_url, payload, expect_errors=True)
    assert "400" in response.status
    assert not Dataset.by_name('openspending-example')
def test_dimensions_update_invalid_json(self):
    """Malformed JSON in a dimensions_update is rejected with a 400."""
    cra = Dataset.by_name('cra')
    # Reset the dataset to a freshly generated, empty state.
    cra.drop()
    cra.init()
    cra.generate()
    endpoint = url(controller='editor', action='dimensions_update',
                   dataset='cra')
    response = self.app.post(endpoint,
                             params={'mapping': 'banana'},
                             extra_environ={'REMOTE_USER': '******'},
                             expect_errors=True)
    assert '400' in response.status, response.status
def list_cubes(self):
    """Expose every dataset that has a mapping as a cube descriptor
    (name + label); unmapped datasets are skipped."""
    return [{'name': dataset.name, 'label': dataset.label}
            for dataset in Dataset.all()
            if len(dataset.mapping)]
def setup(self):
    # Load the 'cra' fixture, reindex Solr, and pick one member of the
    # 'cofog1' compound dimension (the member named '3') for use in the
    # dimension-controller tests.
    super(TestDimensionController, self).setup()
    h.load_fixture('cra')
    h.clean_and_reindex_solr()
    self.cra = Dataset.by_name('cra')
    for dimension in self.cra.dimensions:
        if isinstance(dimension, CompoundDimension) and \
                dimension.name == 'cofog1':
            members = list(dimension.members(
                dimension.alias.c.name == '3',
                limit=1))
            self.member = members.pop()
            break