def test_file_switcher(self):
    '''Check that the file switcher dropdown lists the dataset's
    two other files (and only those).
    '''
    dataset = factories.Dataset()
    first = factories.Resource(dataset=dataset)
    second = factories.Resource(dataset=dataset)
    third = factories.Resource(dataset=dataset)

    response = self.app.get(
        toolkit.url_for(controller='package', action='resource_read',
                        id=dataset['id'], resource_id=first['id']))

    # The dropdown should contain links to the two other files in the
    # package.
    soup = response.html
    links = soup.find('h1', class_='dropdown').find('ul').find_all('a')
    assert len(links) == 2
    for other in (second, third):
        target = toolkit.url_for(controller='package',
                                 action='resource_read',
                                 id=dataset['id'],
                                 resource_id=other['id'])
        # exactly one dropdown entry per sibling resource
        assert len([link for link in links
                    if link['href'] == target]) == 1
def test_delete_confirm_page(self):
    '''The delete confirmation page renders, and cancelling it
    redirects back to the issue without deleting anything.
    '''
    env = {'REMOTE_USER': self.owner['name'].encode('ascii')}
    delete_url = toolkit.url_for('issues_delete',
                                 dataset_id=self.dataset['id'],
                                 issue_number=self.issue['number'])
    response = self.app.get(url=delete_url, extra_environ=env)
    form = response.forms['ckanext-issues-confirm-delete']
    # the form must post back to the delete endpoint
    assert_equals(delete_url, form.action)
    assert_equals([u'cancel', u'delete'], form.fields.keys())

    response = helpers.submit_and_follow(self.app, form, env, 'cancel')
    # cancelling must land back on the issue show page (no deletion)
    assert_equals(
        toolkit.url_for('issues_show',
                        dataset_id=self.dataset['id'],
                        issue_number=self.issue['number']),
        response.request.path_qs)
def dcat_organization_list(context, data_dict):
    '''Return dcat details for organizations in the site.

    Return an array of objects (one per CKAN organization).
    '''
    toolkit.check_access('dcat_organization_list', context, data_dict)
    # NOTE(review): the caller-supplied data_dict is discarded here and
    # replaced with the options passed to organization_list.
    data_dict = {
        'all_fields': True,
        'include_extras': True
    }
    organizations = toolkit.get_action('organization_list')(
        context=context, data_dict=data_dict)
    dcat_org_list = []
    for org in organizations:
        dcat_org_data = {
            'id': org['id'],
            # Absolute URL of this organization's validation endpoint.
            'dcat_validation_result': "{host}{path}".format(
                host=config.get('ckan.site_url').rstrip('/'),
                path=toolkit.url_for('dcat_validation', _id=org['name'])),
            'dcat_validation': None,
            'dcat_validation_date': None,
        }
        dcat_validation = toolkit.get_action('dcat_validation')(
            context, {'id': org['id']})
        if dcat_validation and dcat_validation.get('result'):
            # Validation is considered passed only with zero errors.
            dcat_org_data['dcat_validation'] = \
                dcat_validation['result']['errors'] == 0
            dcat_org_data['dcat_validation_date'] = \
                dcat_validation['last_validation']
        else:
            dcat_org_data['dcat_validation'] = None
            dcat_org_data['dcat_validation_date'] = None
        # set original_dcat_metadata_url
        harvest_list = _harvest_list_for_org(context, org['id'])
        if not harvest_list:
            # Organizations without a harvest source are omitted from
            # the result entirely.
            continue
        harvest_url = harvest_list[0].get('url', '')
        dcat_org_data.update({'original_dcat_metadata_url': harvest_url})
        # set uri (taken from the org's 'url' extra, if present)
        extras = org.get('extras', [])
        url = next((i['value'] for i in extras if i['key'] == 'url'), '')
        dcat_org_data.update({'url': url})
        # set dcat_metadata_url
        dcat_metadata_url = (config.get('ckan.site_url').rstrip('/') +
                             toolkit.url_for('dcat_organization',
                                             _id=org['name'],
                                             _format='rdf'))
        dcat_org_data.update({'dcat_metadata_url': dcat_metadata_url})
        dcat_org_list.append(dcat_org_data)
    return dcat_org_list
def test_organization_member_create_with_max_editors_5(self):
    '''Test that setting max_editors to 5 in the config file works.

    If we edit the config, we should be allowed to create one admin and
    4 editors, but no more.
    '''
    config['ckan.birmingham.max_editors'] = '5'
    organization_admin = factories.User()
    organization = factories.Organization(user=organization_admin)
    editor_1 = factories.User()
    editor_2 = factories.User()
    editor_3 = factories.User()
    editor_4 = factories.User()
    # This should not fail - we should be allowed to have one admin and
    # three editors.
    for editor in (editor_1, editor_2, editor_3):
        helpers.call_action(
            'organization_member_create',
            context={'user': organization_admin['name']},
            id=organization['id'],
            username=editor['name'],
            role='editor')
    # At this point we have 4 "editors" (one admin and three editors)
    # and max_editors is 5, so we should be allowed to add one more
    # editor.
    url = toolkit.url_for(controller='api',
                          logic_function='organization_member_create',
                          action='action',
                          ver=3)
    data_dict = {'id': organization['id'],
                 'username': editor_4['name'],
                 'role': 'editor'}
    response = self.app.post_json(
        url, data_dict,
        extra_environ={'REMOTE_USER': str(organization_admin['name'])})
    members = helpers.call_action('member_list', id=organization['id'])
    assert (organization_admin['id'], 'user', 'Admin') in members
    for editor in (editor_1, editor_2, editor_3, editor_4):
        assert (editor['id'], 'user', 'Editor') in members
    # At this point we should not be allowed to create any more editors
    # though.
    editor_5 = factories.User()
    url = toolkit.url_for(controller='api',
                          logic_function='organization_member_create',
                          action='action',
                          ver=3)
    data_dict = {'id': organization['id'],
                 'username': editor_5['name'],
                 'role': 'editor'}
    response = self.app.post_json(
        url, data_dict,
        extra_environ={'REMOTE_USER': str(organization_admin['name'])},
        status=403)
    # Test that we were denied for the right reason.
    # (This catches mistakes in the tests, for example if the test didn't
    # pass REMOTE_USER we would get a 403 but for a different reason.)
    assert response.json['error']['message'] == ("Access denied: You're "
                                                 "only allowed to have 5 "
                                                 "editors")
def test_import_zipfile(self):
    '''Uploading a MapAction zip creates a private dataset and
    redirects to its edit page.
    '''
    user = factories.User()
    organization = factories.Organization(user=user)
    env = {'REMOTE_USER': user['name'].encode('ascii')}

    response = self.app.post(
        toolkit.url_for('import_mapactionzip'),
        {'owner_org': organization['id']},
        extra_environ=env,
        upload_files=[('upload', custom_helpers.get_test_zip().name)],
    )

    # Should redirect to dataset's page
    assert_equals(response.status_int, 302)
    slug = '189-ma001-aptivate-example'
    assert_regexp_matches(response.headers['Location'],
                          '/dataset/edit/%s' % slug)

    # Should create the dataset
    dataset = helpers.call_action('package_show', id=slug)
    assert_equals(dataset['name'], slug)
    assert_equals(dataset['private'], True)
def test_click_visiblity_links(self):
    '''Clicking the visibility filter links narrows the issue list,
    and clicking the active filter again clears it. Closed issues
    never appear.
    '''
    env = {"REMOTE_USER": self.owner["name"].encode("ascii")}

    def check(page, count_text, visible_shown, hidden_shown):
        # helper: assert the count line and which issues are listed
        assert_in(count_text, page)
        (assert_in if visible_shown else assert_not_in)(
            "visible_issue", page)
        (assert_in if hidden_shown else assert_not_in)(
            "hidden_issue", page)
        assert_not_in("closed_issue", page)

    response = self.app.get(
        url=toolkit.url_for("issues_dataset",
                            dataset_id=self.dataset["id"]),
        extra_environ=env)
    # visible and hidden should be shown, but not closed
    check(response, "2 issues found", True, True)

    # click the hidden filter
    response = response.click(linkid="hidden-filter", extra_environ=env)
    check(response, "1 issue found", False, True)

    # click the visible filter
    response = response.click(linkid="visible-filter", extra_environ=env)
    check(response, "1 issue found", True, False)

    # clear the filter by clicking on visible again
    response = response.click(linkid="visible-filter", extra_environ=env)
    check(response, "2 issues found", True, True)
def upload_file(self, object_type):
    '''Handle a file upload for the given object type.

    Only 'source-metadata' uploads are handled here: the file is stored
    via uploader.MetadataUpload and a JSON description of the stored
    file (name, download url, size) is returned. 'resources' uploads
    are rejected with 400; any other object type aborts with 404.
    '''
    name = request.params.get('name', '')
    # The upload field is prefixed with the object name, if one is given.
    upload_name = name + '-upload' if name else 'upload'
    upload = request.params.get(upload_name)
    if not isinstance(upload, cgi.FieldStorage):
        abort(400, 'Expected a file upload')
    result = None
    if object_type == 'resources':
        abort(400, 'Cannot handle uploading of resources here')
    elif object_type == 'source-metadata':
        up = uploader.MetadataUpload(upload.filename)
        up.update_data_dict(dict(request.params), upload_name)
        up.upload(max_size=1)
        link = toolkit.url_for(
            controller='ckanext.publicamundi.controllers.files:Controller',
            action='download_file',
            object_type=up.object_type,
            name_or_id=up.filename,
            filename=upload.filename)
        # BUG FIX: the original referenced an undefined name `u` on the
        # next two lines (NameError at runtime); the upload object is
        # named `up`.
        size = os.stat(up.filepath).st_size
        result = dict(name=up.filename, url=link, size=size)
    else:
        abort(404, 'Unknown object-type')
    response.headers['Content-Type'] = 'application/json'
    return [to_json(result)]
def identify(self):
    '''Identify which user (if any) is logged-in via WAAD.

    CKAN calls this on each page load. If a logged-in user is found,
    set toolkit.c.user to be their user name.
    '''
    user = pylons.session.get('ckanext-oauth2waad-user')
    if user:
        toolkit.c.user = user
    endpoint = _waad_auth_token_endpoint()
    try:
        # Keep the WAAD access token fresh on every request.
        _refresh_access_token_if_expiring(pylons.session,
                                          _waad_client_id(),
                                          _waad_resource(),
                                          endpoint)
    except CannotRefreshAccessTokenError:
        # Refresh failed: warn the user (once, via flash) and suggest
        # logging out and back in.
        domain_name = _get_domain_name_from_url(endpoint)
        logout_url = toolkit.url_for(controller='user', action='logout')
        message = toolkit._(
            "Refreshing your Windows Azure Active Directory OAuth 2.0 "
            "access token with {domain} failed. Some functionality "
            "may not be available. You can try "
            '<a href="{logout}">logging out</a> and logging in again '
            "to fix the issue.").format(domain=domain_name,
                                        logout=logout_url)
        helpers.flash(message, category='alert-error', allow_html=True,
                      ignore_duplicate=True)
def test_resource_upload_then_clear(self):
    '''Test that clearing an upload removes the S3 key.'''
    sysadmin = factories.Sysadmin(apikey="my-test-key")
    app = self._get_test_app()
    demo = ckanapi.TestAppCKAN(app, apikey='my-test-key')
    dataset = factories.Dataset(name="my-dataset")

    file_path = os.path.join(os.path.dirname(__file__), 'data.csv')
    # FIX: close the uploaded file instead of leaking the handle
    # (the original passed a bare open() that was never closed).
    with open(file_path) as upload:
        resource = demo.action.resource_create(package_id='my-dataset',
                                               upload=upload,
                                               url='file.txt')

    key = '{1}/resources/{0}/data.csv' \
        .format(resource['id'],
                config.get('ckanext.s3filestore.aws_storage_path'))

    conn = boto.connect_s3()
    bucket = conn.get_bucket('my-bucket')
    # test the key exists
    assert_true(bucket.lookup(key))

    # clear upload
    url = toolkit.url_for(controller='package', action='resource_edit',
                          id=dataset['id'], resource_id=resource['id'])
    env = {'REMOTE_USER': sysadmin['name'].encode('ascii')}
    app.post(url,
             {'clear_upload': True, 'url': 'http://asdf', 'save': 'save'},
             extra_environ=env)

    # key shouldn't exist
    assert_false(bucket.lookup(key))
def test_issues_enabled_for_test_dataset(self):
    '''The issues home page is reachable for this dataset.'''
    env = {'REMOTE_USER': self.owner['name'].encode('ascii')}
    issues_url = toolkit.url_for('issues_home',
                                 package_id=self.dataset['id'])
    response = self.app.get(url=issues_url, extra_environ=env)
    assert_equals(200, response.status_int)
def test_organization_pages_index(self):
    '''The organization pages index renders with an "Add page" action.'''
    env = {'REMOTE_USER': self.user['name'].encode('ascii')}
    org = factories.Organization()
    index_url = toolkit.url_for('organization_pages_index', id=org['id'])
    response = self.app.get(index_url, status=200, extra_environ=env)
    for fragment in ('<h2>Pages</h2>', 'Add page</a>'):
        assert_in(fragment, response.body)
def test_group_pages_index(self):
    '''The group pages index renders with an "Add page" action.'''
    env = {'REMOTE_USER': self.user['name'].encode('ascii')}
    group = factories.Group()
    index_url = toolkit.url_for('group_pages_index', id=group['id'])
    response = self.app.get(index_url, status=200, extra_environ=env)
    for fragment in ('<h2>Pages</h2>', 'Add page</a>'):
        assert_in(fragment, response.body)
def test_import_datapackage(self, mock_requests):
    '''Importing a datapackage URL creates the dataset with its
    declared resource and redirects to the dataset page.
    '''
    datapackage_url = 'http://www.foo.com/datapackage.json'
    datapackage = {
        'name': 'foo',
        'resources': [
            {
                'name': 'the-resource',
                'url': 'http://www.somewhere.com/data.csv',
            }
        ]
    }
    mock_requests.register_uri('GET', datapackage_url, json=datapackage)

    user = factories.User()
    env = {'REMOTE_USER': user['name'].encode('ascii')}
    response = self.app.post(
        toolkit.url_for('import_datapackage', url=datapackage_url),
        extra_environ=env,
    )

    # A successful import redirects to the new dataset's page.
    assert_equals(response.status_int, 302)
    assert_regexp_matches(response.headers['Location'], '/dataset/foo$')

    # The dataset exists with the declared resource.
    dataset = helpers.call_action('package_show', id=datapackage['name'])
    assert_equals(dataset['name'], 'foo')
    resources = dataset.get('resources', [])
    assert_equals(len(resources), 1)
    assert_equals(resources[0].get('name'), 'the-resource')
    assert_equals(resources[0].get('url'),
                  datapackage['resources'][0]['url'])
def handle_submit(self, id):
    '''Handle a DOI publication request for dataset `id`.

    Emails each configured support address, records a DoiRequest row in
    the database, flashes a success message, and redirects back to the
    dataset page. Aborts early (via fail_if_private) for private
    datasets.
    '''
    data = clean_dict(dict_fns.unflatten(tuplize_dict(parse_params(
        request.params))))
    data['dataset_url'] = toolkit.url_for(
        controller='package',
        action='read',
        id=id,
        qualified=True
    )
    package = get_action('package_show')(None, {'id': id})
    self.fail_if_private(package, data['dataset_url'])

    # Comma separated config var
    to_addrs = config['ckanext.ands.support_emails'].split(',')
    subject = 'DataPortal Support: Request to publish dataset'
    body = base.render(
        'package/doi_email.text',
        extra_vars=data)
    for email in to_addrs:
        mail_recipient('Dataportal support', email, subject, body)

    # Persist the request, attributed to the current user.
    data['package_id'] = package['id']
    data['user_id'] = c.userobj.id
    doi_request = DoiRequest(**data)
    Session.add(doi_request)
    Session.commit()

    h.flash_success("DOI Request sent")
    return toolkit.redirect_to(data['dataset_url'])
def dataset_export_dcat(context, data_dict):
    '''Export a dataset to RDF XML using GeoDCAT XSLT.

    :param id: the name or id of the dataset to be exported.
    :type id: string

    :rtype: dict
    '''
    pkg = _get_action('package_show')(context, data_dict)
    dtype = pkg.get('dataset_type')
    obj = pkg.get(dtype) if dtype else None
    cached_metadata = get_cache('metadata')
    result = None
    if obj:
        # Get a proper serializer
        xser = xml_serializer_for(obj)
        xser.target_namespace = pylons.config.get('ckan.site_url')
        # Cache key is versioned by the package revision, so a new
        # revision produces a fresh transform.
        name = '%(name)s@%(revision_id)s.dcat' % (pkg)
        # NOTE(review): the return value is unused; the .get() call is
        # made for its side effect of populating the cache via
        # createfunc.
        cached = cached_metadata.get(
            name, createfunc=lambda: _transform_dcat(xser.to_xml()))
        link = toolkit.url_for(
            controller='ckanext.publicamundi.controllers.files:Controller',
            action='download_file',
            object_type='metadata',
            name_or_id=name,
            filename=('%(name)s.xml' % (pkg)))
        result = dict(url=link)
    return result
def test_member_create(self):
    '''Test that the member_create API is also blocked.

    Most of the tests above use organization_member_create.
    '''
    organization_admin = factories.User()
    organization = factories.Organization(user=organization_admin)

    # Fill the organization up to the editor limit (admin + 2 editors).
    for _ in range(2):
        editor = factories.User()
        helpers.call_action('member_create',
                            context={'user': organization_admin['name']},
                            id=organization['id'],
                            object=editor['id'],
                            object_type='user',
                            capacity='editor')

    # We should not be able to create another editor.
    blocked_editor = factories.User()
    url = toolkit.url_for(controller='api',
                          logic_function='member_create',
                          action='action', ver=3)
    payload = {'id': organization['id'],
               'object': blocked_editor['id'],
               'object_type': 'user',
               'capacity': 'editor'}
    response = self.app.post_json(
        url, payload,
        extra_environ={'REMOTE_USER': str(organization_admin['name'])},
        status=403)

    # Test that we were denied for the right reason.
    # (This catches mistakes in the tests, for example if the test didn't
    # pass REMOTE_USER we would get a 403 but for a different reason.)
    assert response.json['error']['message'] == ("Access denied: You're "
                                                 "only allowed to have 3 "
                                                 "editors")
def test_cant_enable_dataset_with_extra_with_this_config(self):
    '''Issues stay disabled even when the dataset carries the
    issues_enabled extra.
    '''
    url = toolkit.url_for(
        'issues_dataset',
        dataset_id=self.dataset_misleading_extra['id'])
    self.app.get(url, status=404)
def test_edit_issue(self):
    '''Editing an issue through the UI updates its title and
    description, both on the page and in the API.
    '''
    env = {'REMOTE_USER': self.owner['name'].encode('ascii')}
    # open the issue show page
    show_page = self.app.get(
        url=toolkit.url_for('issues_show',
                            dataset_id=self.dataset['id'],
                            issue_number=self.issue['number']),
        extra_environ=env,
    )
    # follow the edit link and fill in the form
    edit_page = show_page.click(linkid='issue-edit-link',
                                extra_environ=env)
    form = edit_page.forms['issue-edit']
    form['title'] = 'edited title'
    form['description'] = 'edited description'
    # submit and follow the redirect
    result_page = helpers.webtest_submit(
        form, 'save', extra_environ=env).follow()
    # the new values are rendered ...
    assert_in('edited title', result_page)
    assert_in('edited description', result_page)
    # ... and persisted
    issue = helpers.call_action('issue_show',
                                dataset_id=self.dataset['id'],
                                issue_number=self.issue['number'])
    assert_equals(u'edited title', issue['title'])
    assert_equals(u'edited description', issue['description'])
def test_primary_key_is_on_page_list(self):
    '''The resource page lists each primary-key field as a link to its
    schema entry.
    '''
    dataset, resource, schema = self._create_resource()
    helpers.call_action(
        'resource_schema_pkey_create',
        resource_id=resource['id'],
        pkey=["playerID", "teamID"]
    )
    response = self.app.get(
        toolkit.url_for(controller='package', action='resource_read',
                        id=dataset['id'], resource_id=resource['id']))
    soup = response.html
    nose.tools.assert_true(soup.find(text='Primary key'))
    pkey = soup.find(id='primary-key')
    pkeys = pkey.find_all('li')
    # FIX: the original used assert_true(a, b), which treats the second
    # argument as a failure message and never compares the values (the
    # assertion was vacuous for any truthy href). Use assert_equals, as
    # the matching foreign-key test does.
    nose.tools.assert_equals(
        pkeys[0].a.attrs['href'],
        '/package/{0}/file/{1}/schema/0'.format(
            dataset['id'], resource['id'])
    )
    nose.tools.assert_equals(pkeys[0].a.text, 'playerID')
    nose.tools.assert_equals(
        pkeys[1].a.attrs['href'],
        '/package/{0}/file/{1}/schema/0'.format(
            dataset['id'], resource['id'])
    )
    nose.tools.assert_equals(pkeys[1].a.text, 'teamID')
def test_search_box_submits_q_get(self):
    '''Submitting the search box filters the issue list by title.'''
    matching = [issue_factories.Issue(user_id=self.owner['id'],
                                      dataset_id=self.dataset['id'],
                                      title=title)
                for title in ['some titLe', 'another Title']]
    # some issues not in the search
    for title in ['blah', 'issue']:
        issue_factories.Issue(user_id=self.owner['id'],
                              dataset_id=self.dataset['id'],
                              title=title)

    issue_home = self.app.get(
        url=toolkit.url_for('issues_home',
                            package_id=self.dataset['id'],
                            id=self.issue['id']),
    )
    search_form = issue_home.forms[1]
    search_form['q'] = 'title'
    results = search_form.submit()

    soup = bs4.BeautifulSoup(results.body)
    headings = soup.find(id='issue-list').find_all('h4')
    titles = set([h.a.text.strip() for h in headings])
    assert_equals(set([issue['title'] for issue in matching]), titles)
def test_search_box_appears_issue_dataset_page(self):
    '''The issues dataset page contains the search form.'''
    response = self.app.get(
        url=toolkit.url_for("issues_dataset",
                            dataset_id=self.dataset["id"],
                            issue_number=self.issue["number"])
    )
    page = bs4.BeautifulSoup(response.body)
    search_form = page.find("form", {"class": "search-form"})
    assert_is_not_none(search_form)
def test_doi_fields_new_unowned_dataset_only_in_orgs_false(self):
    '''The doi fields will display for unowned datasets when
    doi_request_only_in_orgs is False.
    '''
    app = self._get_test_app()
    new_dataset_url = toolkit.url_for(controller='package', action='new')
    response = app.get(url=new_dataset_url)
    form_fields = response.forms['dataset-edit'].fields
    assert_true('doi_identifier' in form_fields)
def test_marked_as_spam_does_not_appear_for_user(self):
    '''A spam-flagged issue is hidden from a regular user's listing.'''
    env = {'REMOTE_USER': self.user['name'].encode('ascii')}
    listing = self.app.get(
        url=toolkit.url_for('issues_home',
                            package_id=self.dataset['id']),
        extra_environ=env,
    )
    assert_in('0 issues found', listing.body)
def test_delete_button_appears_for_authed_user(self):
    '''An authorised user sees a Delete link pointing at the delete
    endpoint on the issue page.
    '''
    env = {'REMOTE_USER': self.owner['name'].encode('ascii')}
    response = self.app.get(
        url=toolkit.url_for('issues_show',
                            package_id=self.dataset['id'],
                            id=self.issue['id']),
        extra_environ=env,
    )
    comment_form = response.forms['issue-comment-form']
    soup = bs4.BeautifulSoup(comment_form.text)
    # the last link in the comment form is the delete action
    delete_link = soup.find_all('a')[-1]
    assert_equals('Delete', delete_link.text)
    assert_equals(
        toolkit.url_for('issues_delete',
                        dataset_id=self.dataset['id'],
                        issue_id=self.issue['id']),
        delete_link.attrs['href']
    )
def test_not_found_issue_raises_404(self):
    '''Requesting a nonexistent issue id returns a 404.'''
    env = {'REMOTE_USER': self.owner['name'].encode('ascii')}
    missing_url = toolkit.url_for('issues_show',
                                  package_id=self.dataset['id'],
                                  id='some nonsense')
    response = self.app.get(url=missing_url,
                            extra_environ=env,
                            expect_errors=True)
    assert_equals(response.status_int, 404)
def test_delete(self):
    '''Deleting an issue redirects to the issues overview and removes
    the issue.
    '''
    env = {'REMOTE_USER': self.owner['name'].encode('ascii')}
    response = self.app.post(
        url=toolkit.url_for('issues_delete',
                            dataset_id=self.dataset['id'],
                            issue_id=self.issue['id']),
        extra_environ=env,
    )
    # check we get redirected back to the issues overview page
    assert_equals(302, response.status_int)
    overview = response.follow()
    assert_equals(200, overview.status_int)
    assert_equals(
        toolkit.url_for('issues_home', package_id=self.dataset['id']),
        overview.request.path
    )
    # check the issue is now deleted.
    assert_raises(toolkit.ObjectNotFound, helpers.call_action,
                  'issue_show', id=self.issue['id'])
def test_csv_preview_unicode(self):
    '''Upload a unicode csv file; the resource page must still render.'''
    dataset, resource, _ = self._create_resource('test-data/unicode.csv')
    page = self.app.get(
        toolkit.url_for(controller='package', action='resource_read',
                        id=dataset['id'], resource_id=resource['id']))
    nose.tools.assert_equals(200, page.status_int)
def test_reported_as_abuse_does_not_appear_in_search_to_user_who_reported_it(self):
    '''The reporter no longer sees the issue they reported as abuse.'''
    env = {'REMOTE_USER': self.reporter['name'].encode('ascii')}
    page = self.app.get(
        url=toolkit.url_for('issues_dataset',
                            dataset_id=self.dataset['id']),
        extra_environ=env,
    )
    chunks = parse_issues_dataset(page)
    assert_in('0 issues found', chunks['issues_found'])
def test_report_as_anonymous_user(self):
    '''Anonymous report attempts redirect to a login-required message.'''
    report_url = toolkit.url_for('issues_report',
                                 dataset_id=self.dataset['id'],
                                 issue_number=self.issue['number'])
    response = self.app.post(url=report_url).follow()
    assert_in('You must be logged in to report issues', response.body)
def test_issues_disabled_for_test_dataset(self):
    '''test-dataset has no extra, so its issues page 404s.'''
    env = {'REMOTE_USER': self.owner['name'].encode('ascii')}
    issues_url = toolkit.url_for('issues_home',
                                 package_id=self.dataset['id'])
    response = self.app.get(url=issues_url,
                            extra_environ=env,
                            expect_errors=True)
    assert_equals(404, response.status_int)
def _get_email_configuration(user_name, data_owner, dataset_name, email,
                             message, organization, data_maintainers,
                             only_org_admins=False):
    '''Build the full HTML email (header + body + footer) for a data
    request, by substituting the configured placeholder terms into the
    configured email templates and appending the relevant action links.
    '''
    schema = logic.schema.update_configuration_schema()
    # Placeholder tokens supported in the configured templates, and the
    # values they are replaced with (parallel lists, same indexes).
    avaiable_terms = ['{name}', '{data_maintainers}', '{dataset}',
                      '{organization}', '{message}', '{email}']
    new_terms = [user_name, data_maintainers, dataset_name,
                 organization, message, email]
    try:
        is_user_sysadmin = \
            _get_action('user_show', {'id': c.user}).get('sysadmin')
    except NotFound:
        # NOTE(review): if user_show raises NotFound,
        # is_user_sysadmin is never bound and the '{organization}'
        # branch below would raise NameError — TODO confirm intended.
        pass
    for key in schema:
        # get only email configuration
        if 'email_header' in key:
            email_header = config.get(key)
        elif 'email_body' in key:
            email_body = config.get(key)
        elif 'email_footer' in key:
            email_footer = config.get(key)
    # NOTE(review): `not email_body` with `'{message}' not in email_body`
    # can never both hold for a non-empty body; this early-return path
    # looks unreachable in practice — TODO confirm.
    if '{message}' not in email_body and not email_body and not email_footer:
        email_body += message
        return email_body
    for i in range(0, len(avaiable_terms)):
        if avaiable_terms[i] == '{dataset}' and new_terms[i]:
            # Link the dataset name to its page.
            url = toolkit.url_for(controller='package', action='read',
                                  id=new_terms[i], qualified=True)
            new_terms[i] = '<a href="' + url + '">' + new_terms[i] + '</a>'
        elif avaiable_terms[i] == '{organization}' and is_user_sysadmin:
            new_terms[i] = config.get('ckan.site_title')
        elif avaiable_terms[i] == '{data_maintainers}':
            # Render the maintainer list as "a, b and c".
            if len(new_terms[i]) == 1:
                new_terms[i] = new_terms[i][0]
            else:
                maintainers = ''
                for j, term in enumerate(new_terms[i][:]):
                    maintainers += term
                    if j == len(new_terms[i]) - 2:
                        maintainers += ' and '
                    elif j < len(new_terms[i]) - 1:
                        maintainers += ', '
                new_terms[i] = maintainers
        email_header = email_header.replace(avaiable_terms[i], new_terms[i])
        email_body = email_body.replace(avaiable_terms[i], new_terms[i])
        email_footer = email_footer.replace(avaiable_terms[i], new_terms[i])
    if only_org_admins:
        # Mail is for org admins because the maintainer doesn't exist.
        owner_org = _get_action('package_show', {
            'id': dataset_name
        }).get('owner_org')
        url = toolkit.url_for('requestdata_organization_requests',
                              id=owner_org, qualified=True)
        email_body += '<br><br> This dataset\'s maintainer does not exist.\
 Go to your organisation\'s <a href="' + url + '">Requested Data</a>\
 page to see the new request. Please also edit the dataset and assign\
 a new maintainer.'
    else:
        url = \
            toolkit.url_for('requestdata_my_requests',
                            id=data_owner, qualified=True)
        email_body += '<br><br><strong> Please accept or decline the request\
 as soon as you can by visiting the \
<a href="' + url + '">My Requests</a> page.</strong>'
    organizations =\
        _get_action('organization_list_for_user', {'id': data_owner})
    package = _get_action('package_show', {'id': dataset_name})
    if not only_org_admins:
        # Also point org admins of the owning org at the requests page.
        for org in organizations:
            if org['name'] in organization\
                    and package['owner_org'] == org['id']:
                url = \
                    toolkit.url_for('requestdata_organization_requests',
                                    id=org['name'], qualified=True)
                email_body += '<br><br> Go to <a href="' + url + '">\
Requested data</a> page in organization admin.'
    site_url = config.get('ckan.site_url')
    site_title = config.get('ckan.site_title')
    newsletter_url = config.get('ckanext.requestdata.newsletter_url',
                                site_url)
    twitter_url = \
        config.get('ckanext.requestdata.twitter_url',
                   'https://twitter.com')
    contact_email = config.get('ckanext.requestdata.contact_email', '')
    email_footer += """
        <br/><br/>
        <small>
          <p>
            <a href=" """ + site_url + """ ">""" + site_title + """</a>
          </p>
          <p>
            <a href=" """ + newsletter_url + """ ">\
Sign up for our newsletter</a> | \
<a href=" """ + twitter_url + """ ">Follow us on Twitter</a>\
 | <a href="mailto:""" + contact_email + """ ">Contact us</a>
          </p>
        </small>
    """
    result = email_header + '<br><br>' + email_body + '<br><br>' + email_footer
    return result
def test_get_public_key_no_key_configured(app):
    '''With no key configured the endpoint returns an empty 204.'''
    public_key_url = toolkit.url_for('authz_service.public_key')
    response = app.get(public_key_url, status=204)
    assert not response.body
def get_mail_extra_vars(context, source_id, status):
    '''Collect template variables for a harvest failure notification
    mail, based on the source's last job report.
    '''
    last_job = status['last_job']
    source = get_action('harvest_source_show')(context, {'id': source_id})
    report = get_action('harvest_job_report')(context, {
        'id': status['last_job']['id']
    })
    obj_errors = []
    job_errors = []
    # List of error messages to suppress notifications for
    ignored_errors = ['No records to change']
    # Collect at most the first 20 object errors.
    for harvest_object_error_key in islice(report.get('object_errors'),
                                           0, 20):
        harvest_object_error = report.get(
            'object_errors')[harvest_object_error_key]['errors']
        for error in harvest_object_error:
            if error['message'] not in ignored_errors:
                obj_errors.append(error['message'])
    ckan_site_url = config.get('ckan.site_url')
    job_url = toolkit.url_for('harvest_job_show', source=source['id'],
                              id=last_job['id'])
    # NOTE(review): `msg` is assembled below but never returned or added
    # to extra_vars — it appears to be dead code; TODO confirm.
    msg = 'This is a failure-notification of the latest harvest job on ' \
        + ckan_site_url + '.\n\n'
    msg += 'Harvest Job URL: ' + ckan_site_url + job_url + '\n\n'
    msg += toolkit._('Harvest Source: {0}').format(source['title']) + '\n'
    if source.get('config'):
        msg += toolkit._('Harvester-Configuration: {0}').format(
            source['config']) + '\n'
    msg += '\n\n'
    if source['organization']:
        msg += toolkit._('Organization: {0}').format(
            source['organization']['name'])
        msg += '\n\n'
    # Collect at most the first 20 gather errors.
    for harvest_gather_error in islice(report.get('gather_errors'), 0, 20):
        if harvest_gather_error['message'] not in ignored_errors:
            job_errors.append(harvest_gather_error['message'])
    if source.get('organization'):
        organization = source['organization']['name']
    else:
        organization = 'Not specified'
    msg += 'For help, please contact the NCAR Data Stewardship Coordinator (mailto:[email protected]).\n\n\n'
    harvest_configuration = source.get('config')
    if harvest_configuration in [None, '', '{}']:
        harvest_configuration = 'Not specified'
    errors = job_errors + obj_errors
    site_url = config.get('ckan.site_url')
    job_url = toolkit.url_for('harvest_job_show', source=source['id'],
                              id=last_job['id'])
    full_job_url = urljoin(site_url, job_url)
    extra_vars = {
        'organization': organization,
        'site_title': config.get('ckan.site_title'),
        'site_url': site_url,
        'job_url': full_job_url,
        'harvest_source_title': source['title'],
        'harvest_configuration': harvest_configuration,
        'job_finished': last_job['finished'],
        'job_id': last_job['id'],
        'job_created': last_job['created'],
        'records_in_error': str(last_job['stats'].get('errored', 0)),
        'records_added': str(last_job['stats'].get('added', 0)),
        'records_deleted': str(last_job['stats'].get('deleted', 0)),
        'records_updated': str(last_job['stats'].get('updated', 0)),
        'error_summary_title': toolkit._('Error Summary'),
        'obj_errors_title': toolkit._('Document Error'),
        'job_errors_title': toolkit._('Job Errors'),
        'obj_errors': obj_errors,
        'job_errors': job_errors,
        'errors': errors,
    }
    return extra_vars
def test_foreign_key_is_on_page_list(self):
    '''The resource page lists the foreign key: both the source fields
    and the referenced fields link to their schema entries, and the
    referenced file's name is shown.
    '''
    user = factories.User()
    dataset = factories.Dataset(user=user)
    csv_file0 = custom_helpers.get_csv_file(
        'test-data/lahmans-baseball-database/BattingPost.csv')
    api = ckanapi.TestAppCKAN(self.app, apikey=user['apikey'])
    resource0 = api.action.resource_create(package_id=dataset['id'],
                                           upload=csv_file0)
    # NOTE(review): schema0/schema1 are never used afterwards.
    schema0 = api.action.resource_schema_show(resource_id=resource0['id'])
    csv_file1 = custom_helpers.get_csv_file(
        'test-data/lahmans-baseball-database/AllstarFull.csv')
    api = ckanapi.TestAppCKAN(self.app, apikey=user['apikey'])
    resource1 = api.action.resource_create(package_id=dataset['id'],
                                           upload=csv_file1)
    schema1 = api.action.resource_schema_show(resource_id=resource1['id'])
    helpers.call_action(
        'resource_schema_fkey_create',
        field=["playerID", "yearID"],
        resource_id=resource1['id'],
        referenced_resource_id=resource0['id'],
        referenced_field=["playerID", "yearID"],
    )
    response = self.app.get(
        toolkit.url_for(controller='package', action='resource_read',
                        id=dataset['id'], resource_id=resource1['id']))
    soup = response.html
    nose.tools.assert_true(soup.find(text='Foreign keys'))
    fkey = soup.find(id='foreign-key')
    # check that the name of the referenced csv file appears
    nose.tools.assert_true('BattingPost.csv' in fkey.text)
    fkeys = fkey.find_all('li')
    # check the source link
    nose.tools.assert_equals(
        fkeys[0].a.attrs['href'],
        '/package/{0}/file/{1}/schema/0'.format(
            dataset['id'], resource1['id'])
    )
    nose.tools.assert_equals(fkeys[0].a.text, 'playerID')
    nose.tools.assert_equals(
        fkeys[1].a.attrs['href'],
        '/package/{0}/file/{1}/schema/1'.format(
            dataset['id'], resource1['id'])
    )
    nose.tools.assert_equals(fkeys[1].a.text, 'yearID')
    # check the destination link
    nose.tools.assert_equals(
        fkeys[2].a.attrs['href'],
        '/package/{0}/file/{1}/schema/2'.format(
            dataset['id'], resource0['id'])
    )
    nose.tools.assert_equals(fkeys[2].a.text, 'playerID')
    # check the destination link
    nose.tools.assert_equals(
        fkeys[3].a.attrs['href'],
        '/package/{0}/file/{1}/schema/0'.format(
            dataset['id'], resource0['id'])
    )
    nose.tools.assert_equals(fkeys[3].a.text, 'yearID')
def do_if_user_not_sysadmin():
    '''Send anonymous users to the login page; 404 non-sysadmins.'''
    if not c.userobj:
        tk.redirect_to(tk.url_for(controller='user', action='login'))
    if not c.userobj.sysadmin:
        # not 403 for security reasons
        abort(404)
def get_absolute_url(self):
    '''Return the forum thread URL for this object.'''
    thread = self.thread
    return tk.url_for('forum_thread_show',
                      slug=thread.board.slug,
                      id=thread.id)
def test_edit_path(self):
    '''organization_edit builds the expected path.'''
    edit_path = toolkit.url_for('organization_edit', id='roger')
    assert edit_path == '/organization/edit/roger'
def _send_request_mail(data):
    '''Email an access request for a restricted resource.

    Sends the rendered request to the resource maintainer and the site
    admin address (one mail each — the mailer cannot address several
    recipients at once), then forwards a link-free copy to the
    requesting user.

    :param data: dict describing the request (package/resource ids and
        names, the requesting user's id/name/email, the maintainer's
        name/email and a free-text message).
    :returns: True if every mail was sent, False on MailerException.
    '''
    success = False
    try:
        resource_link = toolkit.url_for(action='read',
                                        controller='dataset_resource',
                                        id=data.get('package_name'),
                                        resource_id=data.get('resource_id'))
        resource_edit_link = toolkit.url_for(
            action='edit',
            controller='dataset_resource',
            id=data.get('package_name'),
            resource_id=data.get('resource_id'))
        extra_vars = {
            'site_title': config.get('ckan.site_title'),
            'site_url': config.get('ckan.site_url'),
            'maintainer_name': data.get('maintainer_name', 'Maintainer'),
            'user_id': data.get('user_id', 'the user id'),
            'user_name': data.get('user_name', ''),
            'user_email': data.get('user_email', ''),
            'resource_name': data.get('resource_name', ''),
            'resource_link': config.get('ckan.site_url') + resource_link,
            'resource_edit_link':
                config.get('ckan.site_url') + resource_edit_link,
            # NOTE(review): filled from resource_name, not package_name —
            # looks like a copy/paste slip; confirm against the template.
            'package_name': data.get('resource_name', ''),
            'message': data.get('message', ''),
            'admin_email_to': config.get('email_to', 'email_to_undefined')
        }
        mail_template = 'restricted/emails/restricted_access_request.txt'
        body = render(mail_template, extra_vars)
        subject = \
            _('Access Request to resource {0} ({1}) from {2}').format(
                data.get('resource_name', ''),
                data.get('package_name', ''),
                data.get('user_name', ''))
        # recipient email -> display name
        email_dict = {
            data.get('maintainer_email'):
                extra_vars.get('maintainer_name'),
            extra_vars.get('admin_email_to'):
                '{} Admin'.format(extra_vars.get('site_title'))
        }
        headers = {
            'CC': ",".join(email_dict.keys()),
            'reply-to': data.get('user_email')
        }
        # CC doesn't work and mailer cannot send to multiple addresses
        for email, name in email_dict.items():
            mailer.mail_recipient(recipient_name=name,
                                  recipient_email=email,
                                  subject='Fwd: ' + subject,
                                  body=body,
                                  body_html=None,
                                  headers=headers)
        # Special copy for the user (no links)
        email = data.get('user_email')
        name = data.get('user_name', 'User')
        extra_vars['resource_link'] = '[...]'
        extra_vars['resource_edit_link'] = '[...]'
        body = render('restricted/emails/restricted_access_request.txt',
                      extra_vars)
        body_user = _('Please find below a copy of the access '
                      'request mail sent. \n\n >> {}').format(
                          body.replace("\n", "\n >> "))
        mailer.mail_recipient(recipient_name=name,
                              recipient_email=email,
                              subject='Fwd: ' + subject,
                              body=body_user,
                              body_html=None,
                              headers=headers)
        success = True
    except mailer.MailerException as mailer_exception:
        log.error('Can not access request mail after registration.')
        log.error(mailer_exception)
    return success
def get_url(self):
    '''Return the georss event URL filtered to this event's group.'''
    base = toolkit.url_for('mapaction_georss_event')
    return '{0}?groups={1}'.format(base, self.event_name)
def test_new_requires_user_to_be_able_to_create_packages(self):
    '''A user without dataset-create rights gets a 401 on the import page.'''
    user = factories.User()
    environ = {'REMOTE_USER': user['name'].encode('ascii')}
    response = self.app.get(
        toolkit.url_for('import_datapackage'),
        extra_environ=environ,
        status=[401],
    )
    assert_true('Unauthorized to create a dataset' in response.body)
def test_new_renders(self):
    '''The import page renders with HTTP 200 for a logged-in user.'''
    user = factories.User()
    environ = {'REMOTE_USER': user['name'].encode('ascii')}
    page = self.app.get(toolkit.url_for('import_datapackage'),
                        extra_environ=environ)
    assert_equals(200, page.status_int)
def resource_download(self, id, resource_id, filename=None):
    '''
    Provide a download by either redirecting the user to the url stored
    or downloading the uploaded file from S3.

    For uploaded resources, a short-lived (60s) presigned S3 URL is
    generated and the client is redirected to it; on a missing key an
    optional filesystem fallback route is tried before 404ing.

    :param id: package id or name.
    :param resource_id: resource id.
    :param filename: optional file name; defaults to the basename of
        the resource URL.
    '''
    context = {
        'model': model,
        'session': model.Session,
        'user': c.user or c.author,
        'auth_user_obj': c.userobj
    }
    try:
        rsc = get_action('resource_show')(context, {'id': resource_id})
        # access check only; result unused
        get_action('package_show')(context, {'id': id})
    except NotFound:
        abort(404, _('Resource not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read resource %s') % id)
    if rsc.get('url_type') == 'upload':
        upload = uploader.get_resource_uploader(rsc)
        bucket_name = config.get('ckanext.s3filestore.aws_bucket_name')
        # NOTE(review): `region` is read but never used below.
        region = config.get('ckanext.s3filestore.region_name')
        host_name = config.get('ckanext.s3filestore.host_name')
        bucket = upload.get_s3_bucket(bucket_name)
        if filename is None:
            filename = os.path.basename(rsc['url'])
        key_path = upload.get_path(rsc['id'], filename)
        key = filename
        # NOTE(review): `key` can never be None here (filename was just
        # defaulted above), so this warning branch is dead code.
        if key is None:
            log.warn('Key \'{0}\' not found in bucket \'{1}\''.format(
                key_path, bucket_name))
        try:
            # Small workaround to manage downloading of large files
            # We are using redirect to minio's resource public URL
            s3 = upload.get_s3_session()
            client = s3.client(service_name='s3', endpoint_url=host_name)
            url = client.generate_presigned_url(ClientMethod='get_object',
                                                Params={
                                                    'Bucket': bucket.name,
                                                    'Key': key_path
                                                },
                                                ExpiresIn=60)
            redirect(url)
        except ClientError as ex:
            if ex.response['Error']['Code'] == 'NoSuchKey':
                # attempt fallback
                if config.get(
                        'ckanext.s3filestore.filesystem_download_fallback',
                        False):
                    log.info(
                        'Attempting filesystem fallback for resource {0}'.
                        format(resource_id))
                    url = toolkit.url_for(
                        controller=
                        'ckanext.s3filestore.controller:S3Controller',
                        action='filesystem_resource_download',
                        id=id,
                        resource_id=resource_id,
                        filename=filename)
                    redirect(url)
                abort(404, _('Resource data not found'))
            else:
                raise ex
def get_absolute_url(self):
    '''Return the canonical URL of this board, keyed by its slug.'''
    board_slug = self.slug
    return tk.url_for('apps_board_show', slug=board_slug)
def packagezip_url(pkg):
    '''Return the zipfile download URL for *pkg*.'''
    package_name = pkg.name
    return toolkit.url_for('zipfile', name=package_name)
def get_absolute_url(self):
    '''Return the canonical URL of this app, keyed by its id.'''
    app_id = self.id
    return tk.url_for('apps_app_show', id=app_id)
def copy(self, id): context = { 'model': m, 'session': m.Session, 'user': p.toolkit.c.user or p.toolkit.c.author, 'auth_user_obj': p.toolkit.c.userobj, 'save': 'save' in t.request.params, } # check permissions try: t.check_access('package_create', context) except t.NotAuthorized: t.abort(401, t._('Unauthorized to copy this package')) data_dict = {'id': id} data = t.get_action('package_show')(None, data_dict) # change dataset title and name data['name'] = '{}-copy'.format(data['name']) while True: try: _ = t.get_action('package_show')(None, { 'name_or_id': data['name'] }) except l.NotFound: break else: import random data['name'] = '{}-copy-{}'.format(data['name'], random.randint(1, 100)) data['title'] = 'Copy of {0}'.format(data['title']) # remove unnecessary attributes from the dataset remove_atts = [ 'id', 'revision_id', 'metadata_created', 'metadata_modified', 'resources', 'revision_timestamp' ] for attr in remove_atts: if attr in data: del data[attr] if data and 'type' in data: package_type = data['type'] else: package_type = self._guess_package_type(True) data = data or clean_dict( dict_fns.unflatten( tuplize_dict( parse_params(t.request.params, ignore_keys=CACHE_PARAMETERS)))) c.resources_json = h.json.dumps(data.get('resources', [])) # convert tags if not supplied in data if data and not data.get('tag_string'): data['tag_string'] = ', '.join( h.dict_list_reduce(data.get('tags', {}), 'name')) # if we are creating from a group then this allows the group to be # set automatically data['group_id'] = t.request.params.get('group') or \ t.request.params.get('groups__0__id') # in the phased add dataset we need to know that # we have already completed stage 1 stage = ['active'] if data.get('state', '').startswith('draft'): stage = ['active', 'complete'] form_snippet = self._package_form(package_type=package_type) form_vars = { 'data': data, 'errors': {}, 'error_summary': {}, 'action': 'new', 'stage': stage, 'dataset_type': package_type, } c.errors_json = 
h.json.dumps({}) # override form action to use built-in package controller c.form_action = t.url_for(controller='package', action='new') self._setup_template_variables(context, {}, package_type=package_type) new_template = self._new_template(package_type) extra_vars = { 'form_vars': form_vars, 'form_snippet': form_snippet, 'dataset_type': package_type } return t.render(new_template, extra_vars=extra_vars)
def test_email_hierarchy(self):
    '''The summary email groups new datasets under their top-level
    container hierarchy and omits containers with no new datasets.
    '''
    # Top-level containers; 'americas' gets no datasets and must not
    # appear in the email at all.
    africa = factories.DataContainer(name='africa', title='Africa')
    europe = factories.DataContainer(name='europe', title='Europe')
    americas = factories.DataContainer(name='americas', title='Americas')
    central_africa = factories.DataContainer(
        name='central-africa',
        title='Central Africa and the Great Lakes',
        groups=[{
            'name': africa['name']
        }],
    )
    eastern_europe = factories.DataContainer(
        name='eastern-europe',
        title='Eastern Europe',
        groups=[{
            'name': europe['name']
        }],
    )
    burundi = factories.DataContainer(
        name='burundi',
        title='Burundi',
        groups=[{
            'name': central_africa['name']
        }],
    )
    belarus = factories.DataContainer(
        name='belarus',
        title='Belarus',
        groups=[{
            'name': eastern_europe['name']
        }],
    )
    # One new dataset at each level of the Africa branch plus one in the
    # Europe branch: 4 events total, 3 under Africa, 1 under Europe.
    factories.Dataset(
        name='africa-dataset1',
        title='Africa Dataset 1',
        owner_org=africa['id'],
    )
    factories.Dataset(
        name='central-africa-dataset1',
        title='Central Africa Dataset 1',
        owner_org=central_africa['id'],
    )
    factories.Dataset(
        name='burundi-dataset1',
        title='Burundi Dataset 1',
        owner_org=burundi['id'],
    )
    factories.Dataset(
        name='belarus-dataset1',
        title='Belarus Dataset 1',
        owner_org=belarus['id'],
    )
    email = mailer.compose_summary_email_body(self.sysadmin)
    regularised_body = regularise_html(email['body'])
    # Each fragment must appear (whitespace-insensitively) in the body.
    expected_values = [
        '''
        <h1>New datasets (4)</h1>
        ''',
        '''
        <h2> <a href="{}">Africa</a> (3) </h2>
        '''.format(
            toolkit.url_for('data-container.read', id='africa',
                            qualified=True)),
        '''
        <li> <a href="{dataset}">Burundi Dataset 1</a> in
        <a href="{container}">Burundi</a> </li>
        '''.format(
            dataset=toolkit.url_for('dataset.read', id='burundi-dataset1',
                                    qualified=True),
            container=toolkit.url_for('data-container.read', id='burundi',
                                      qualified=True),
        ),
        '''
        <li> <a href="{dataset}">Central Africa Dataset 1</a> in
        <a href="{container}">Central Africa and the Great Lakes</a> </li>
        '''.format(
            dataset=toolkit.url_for('dataset.read',
                                    id='central-africa-dataset1',
                                    qualified=True),
            container=toolkit.url_for('data-container.read',
                                      id='central-africa',
                                      qualified=True),
        ),
        '''
        <li> <a href="{dataset}">Africa Dataset 1</a> in
        <a href="{container}">Africa</a> </li>
        '''.format(
            dataset=toolkit.url_for('dataset.read', id='africa-dataset1',
                                    qualified=True),
            container=toolkit.url_for('data-container.read', id='africa',
                                      qualified=True),
        ),
        '''
        <h2> <a href="{root}">Europe</a> (1) </h2>
        <ul> <li> <a href="{dataset}">Belarus Dataset 1</a> in
        <a href="{container}">Belarus</a> </li> </ul>
        '''.format(
            root=toolkit.url_for('data-container.read', id='europe',
                                 qualified=True),
            dataset=toolkit.url_for('dataset.read', id='belarus-dataset1',
                                    qualified=True),
            container=toolkit.url_for('data-container.read', id='belarus',
                                      qualified=True),
        ),
    ]
    assert 4 == email['total_events']
    for ev in expected_values:
        assert regularise_html(ev) in regularised_body
    # Containers with no new datasets are excluded entirely.
    assert 'Americas' not in regularised_body
    assert (toolkit.url_for('data-container.read', id='americas',
                            qualified=True) not in regularised_body)
def send_error_mail(context, source_id, status):
    '''Notify sysadmins and organization admins that a harvest job failed.

    Builds a plain-text summary of the latest job of harvest source
    *source_id* (config, organization, timing, error counts plus up to
    20 object errors and 20 gather errors) and mails it to every
    sysadmin and every admin of the source's organization.

    :param context: action context; must contain 'model'.
    :param source_id: id of the harvest source.
    :param status: harvest source status dict with a 'last_job' entry.
    '''
    last_job = status['last_job']
    source = get_action('harvest_source_show')(context, {'id': source_id})
    ckan_site_url = config.get('ckan.site_url')
    job_url = toolkit.url_for('harvest_job_show',
                              source=source['id'],
                              id=last_job['id'])
    msg = toolkit._('This is a failure-notification of the latest harvest job ({0}) set-up in {1}.')\
        .format(job_url, ckan_site_url)
    msg += '\n\n'
    msg += toolkit._('Harvest Source: {0}').format(source['title']) + '\n'
    if source.get('config'):
        msg += toolkit._('Harvester-Configuration: {0}').format(
            source['config']) + '\n'
    msg += '\n\n'
    if source['organization']:
        msg += toolkit._('Organization: {0}').format(
            source['organization']['name'])
        msg += '\n\n'
    msg += toolkit._('Harvest Job Id: {0}').format(last_job['id']) + '\n'
    msg += toolkit._('Created: {0}').format(last_job['created']) + '\n'
    msg += toolkit._('Finished: {0}').format(last_job['finished']) + '\n\n'
    report = get_action('harvest_job_report')(context, {
        'id': status['last_job']['id']
    })
    msg += toolkit._('Records in Error: {0}').format(
        str(last_job['stats'].get('errored', 0)))
    msg += '\n'
    obj_error = ''
    job_error = ''
    # include at most the first 20 object errors and 20 gather errors
    for harvest_object_error_key in islice(report.get('object_errors'),
                                           0, 20):
        harvest_object_error = report.get(
            'object_errors')[harvest_object_error_key]['errors']
        for error in harvest_object_error:
            obj_error += error['message']
    for harvest_gather_error in islice(report.get('gather_errors'), 0, 20):
        job_error += harvest_gather_error['message'] + '\n'
    if (obj_error != '' or job_error != ''):
        msg += toolkit._('Error Summary')
        msg += '\n'
    if (obj_error != ''):
        msg += toolkit._('Document Error')
        msg += '\n' + obj_error + '\n\n'
    if (job_error != ''):
        msg += toolkit._('Job Errors')
        msg += '\n' + job_error + '\n\n'
    if obj_error or job_error:
        msg += '\n--\n'
        msg += toolkit._('You are receiving this email because you are currently set-up as Administrator for {0}.'
                         ' Please do not reply to this email as it was sent from a non-monitored address.')\
            .format(config.get('ckan.site_title'))
    recipients = []
    # gather sysadmins
    model = context['model']
    sysadmins = model.Session.query(
        model.User).filter(model.User.sysadmin == True  # noqa: E712
                           ).all()
    for sysadmin in sysadmins:
        recipients.append({'name': sysadmin.name, 'email': sysadmin.email})
    # gather organization-admins
    if source.get('organization'):
        members = get_action('member_list')(
            context, {
                'id': source['organization']['id'],
                'object_type': 'user',
                'capacity': 'admin'
            })
        for member in members:
            member_details = get_action('user_show')(context, {
                'id': member[0]
            })
            if member_details['email']:
                recipients.append({
                    'name': member_details['name'],
                    'email': member_details['email']
                })
    # one mail per recipient; mailer failures are logged, other errors
    # are logged and re-raised
    for recipient in recipients:
        email = {
            'recipient_name': recipient['name'],
            'recipient_email': recipient['email'],
            'subject':
                config.get('ckan.site_title') +
                ' - Harvesting Job - Error Notification',
            'body': msg
        }
        try:
            mailer.mail_recipient(**email)
        except mailer.MailerException:
            log.error(
                'Sending Harvest-Notification-Mail failed. Message: ' +
                msg)
        except Exception as e:
            log.error(e)
            raise
def test_read_path(self):
    '''The organization_read route resolves to the expected path.'''
    expected = '/organization/roger'
    assert toolkit.url_for('organization_read', id='roger') == expected
def test_email_body(self):
    '''The summary email lists new datasets, new deposits and deposits
    awaiting review, and excludes datasets created before the window.
    '''
    deposit = factories.DataContainer(id='data-deposit')
    target = factories.DataContainer(id='data-target')
    org = factories.DataContainer(name='test-org', title='Test Org')
    factories.Dataset(
        name='new-dataset',
        title='New Dataset',
        owner_org=org['id'],
    )
    factories.Dataset(
        name='new-deposit',
        title='New Deposit',
        type='deposited-dataset',
        owner_org=deposit['id'],
        owner_org_dest=target['id'],
        curation_state='submitted',
    )
    factories.Dataset(
        name='awaiting-review',
        title='Awaiting Review',
        type='deposited-dataset',
        owner_org=deposit['id'],
        owner_org_dest=target['id'],
        curation_state='review',
    )
    old_dataset = factories.Dataset(
        name='old-dataset',
        title='Old Dataset',
        owner_org=org['id'],
    )
    # This is a little bit messy.
    # We can't set the `metadata_created` property via a factory or an
    # action and the default is set in postgres so freezegun doesn't help.
    # So we will update the value directly using SQLAlchemy:
    model.Session.query(
        model.Package).filter_by(id=old_dataset['id']).update(
            {"metadata_created": datetime.now() - timedelta(days=8)})
    model.Session.commit()
    # ..and then refresh the search index
    # so that the record is up-to-date when we query solr
    search.rebuild(package_id=old_dataset['id'])
    # Fragments compared whitespace-insensitively via regularise_html.
    expected_values = [
        '''
        <h1>New datasets (1)</h1>
        <h2> <a href="{org}">Test Org</a> (1) </h2>
        <ul> <li> <a href="{ds}">New Dataset</a> in
        <a href="{org}">Test Org</a> </li> </ul>'''
        .format(ds=toolkit.url_for('dataset.read', id='new-dataset',
                                   qualified=True),
                org=toolkit.url_for('data-container.read', id='test-org',
                                    qualified=True)),
        '''
        <h2>New deposited datasets (1)</h2>
        <ul> <li> <a href="{}">New Deposit</a> </li> </ul>'''.format(
            toolkit.url_for('deposited-dataset.read', id='new-deposit',
                            qualified=True)),
        '''
        <h2>Datasets awaiting review (1)</h2>
        <ul> <li> <a href="{}">Awaiting Review</a> </li> </ul>'''.format(
            toolkit.url_for('deposited-dataset.read', id='awaiting-review',
                            qualified=True)),
    ]
    email = mailer.compose_summary_email_body(self.sysadmin)
    regularised_body = regularise_html(email['body'])
    assert 3 == email['total_events']
    for ev in expected_values:
        assert regularise_html(ev) in regularised_body
    # The week-old dataset must not be reported.
    assert 'Old Dataset' not in regularised_body
    assert (toolkit.url_for("dataset.read", id="old-dataset",
                            qualified=True) not in regularised_body)
def datapackage_url(pkg):
    '''Return the datapackage view URL for *pkg*.'''
    package_name = pkg.name
    return toolkit.url_for('datapackage', name=package_name)
def test_upload_form(self):
    '''Flash a confirmation and redirect on POST; render the form on GET.'''
    is_post = request.method == 'POST'
    if is_post:
        h.flash('Thanks for uploading data', 'alert-info')
        dataset_url = toolkit.url_for('/dataset')
        redirect(dataset_url)
    return render('tests/upload-form.html')
def test_blog_index(self):
    '''The blog index renders its heading and the add-article link.'''
    environ = {'REMOTE_USER': self.user['name'].encode('ascii')}
    response = self.app.get(toolkit.url_for('blog_index'),
                            status=200, extra_environ=environ)
    for fragment in ('<h2>Blog</h2>', 'Add Article</a>'):
        assert_in(fragment, response.body)
def resource_download(self, id, resource_id, filename=None):
    '''
    Provide a download by either redirecting the user to the url stored
    or downloading the uploaded file from S3.

    For uploaded resources the object is fetched from S3 and streamed
    via a paste DataApp; on a missing key an optional filesystem
    fallback route is tried before 404ing. Link-type resources are
    redirected to their stored URL.

    :param id: package id or name.
    :param resource_id: resource id.
    :param filename: optional file name; defaults to the basename of
        the resource URL.
    '''
    context = {
        'model': model,
        'session': model.Session,
        'user': c.user or c.author,
        'auth_user_obj': c.userobj
    }
    try:
        rsc = get_action('resource_show')(context, {'id': resource_id})
        # access check only; result unused
        get_action('package_show')(context, {'id': id})
    except NotFound:
        abort(404, _('Resource not found'))
    except NotAuthorized:
        abort(401, _('Unauthorized to read resource %s') % id)
    if rsc.get('url_type') == 'upload':
        upload = uploader.get_resource_uploader(rsc)
        bucket_name = config.get('ckanext.s3filestore.aws_bucket_name')
        # NOTE(review): `region` is read but never used below.
        region = config.get('ckanext.s3filestore.region_name')
        bucket = upload.get_s3_bucket(bucket_name)
        if filename is None:
            filename = os.path.basename(rsc['url'])
        key_path = upload.get_path(rsc['id'], filename)
        key = filename
        # NOTE(review): `key` can never be None here (filename was just
        # defaulted above), so this warning branch is dead code.
        if key is None:
            log.warn('Key \'{0}\' not found in bucket \'{1}\''.format(
                key_path, bucket_name))
        try:
            obj = bucket.Object(key_path)
            # NOTE(review): str() over the raw body reads as Python 2
            # code; under Python 3 this would yield a "b'...'" wrapper.
            contents = str(obj.get()['Body'].read())
        except ClientError as ex:
            if ex.response['Error']['Code'] == 'NoSuchKey':
                # attempt fallback
                if config.get(
                        'ckanext.s3filestore.filesystem_download_fallback',
                        False):
                    log.info(
                        'Attempting filesystem fallback for resource {0}'.
                        format(resource_id))
                    url = toolkit.url_for(
                        controller=
                        'ckanext.s3filestore.controller:S3Controller',
                        action='filesystem_resource_download',
                        id=id,
                        resource_id=resource_id,
                        filename=filename)
                    redirect(url)
                abort(404, _('Resource data not found'))
            else:
                raise ex
        # serve the fetched bytes through a paste DataApp so the usual
        # content-length/range headers are produced
        dataapp = paste.fileapp.DataApp(contents)
        try:
            status, headers, app_iter = request.call_application(dataapp)
        except OSError:
            abort(404, _('Resource data not found'))
        response.headers.update(dict(headers))
        response.status = status
        content_type, x = mimetypes.guess_type(rsc.get('url', ''))
        if content_type:
            response.headers['Content-Type'] = content_type
        return app_iter
    elif 'url' not in rsc:
        abort(404, _('No download is available'))
    redirect(str(rsc['url']))
def submit(self, resource_id, cn):
    '''Submit an access request for *resource_id* / role *cn* to the
    external workflow service.

    Validates the required workflow configuration (flashing an error
    and redirecting back to the request form when anything is missing),
    builds the request payload from the package/resource metadata and
    the submitted form, POSTs it to the workflow URL with basic auth
    and redirects to the dataset page.
    '''
    try:
        workflow_url = config['ckanext.access.workflow_url']
    except KeyError:
        flash_error(
            "Please set ckanext.access.workflow_url in order to submit access requests."
        )
        redirect_to("get_access_request", resource_id=resource_id, cn=cn)
    try:
        workflow_user = config['ckanext.access.workflow_user']
    except KeyError:
        flash_error(
            "Please set ckanext.access.workflow_user in order to submit access requests."
        )
        redirect_to("get_access_request", resource_id=resource_id, cn=cn)
    try:
        workflow_pass = config['ckanext.access.workflow_pass']
    except KeyError:
        flash_error(
            "Please set ckanext.access.workflow_pass in order to submit access requests."
        )
        redirect_to("get_access_request", resource_id=resource_id, cn=cn)
    try:
        dns = config.get("ckanext.cfpb_ldap_query.base_dns").split('|')
    except ValueError:
        flash_error("At least one valid DN must be configured.")
        redirect_to("get_access_request", resource_id=resource_id, cn=cn)
    resource = get_action('resource_show')({}, data_dict={
        'id': resource_id
    })
    package = get_action('package_show')({}, data_dict={
        'id': resource['package_id']
    })
    role_description = get_role(json.loads(resource['db_roles']),
                                cn)['description']
    dataset_url = '://'.join(get_site_protocol_and_host()) + url_for(
        controller='package', action='read', id=package['name'])
    workflow_json = {
        "workflowArgs": {
            "datasetTitle": package['title'],
            "groupDN": "CN={},{}".format(cn, dns[0]),
            "sAMAccountName": request.POST['user'],
            "dataStewardEmail": package['contact_primary_email'],
            "dataStewardEmail2": package['contact_secondary_email'],
            "description": role_description,
            "usageRestriction": package['usage_restrictions'],
            "justification": request.POST['justification'],
            "accessRestriction": package['access_restrictions'],
            "url": dataset_url
        }
    }
    try:
        # NOTE(review): verify=False disables TLS certificate checks on
        # the workflow call — confirm this is intentional.
        response = requests.post(workflow_url,
                                 json=workflow_json,
                                 auth=HTTPBasicAuth(
                                     workflow_user, workflow_pass),
                                 verify=False)
        # NOTE(review): "recieve" typo in the user-facing message below
        # (left untouched here; it is runtime text).
        flash_notice(
            "Access request has been sent, you will recieve email updates on the status of the request as it is processed."
        )
    except Exception as e:
        flash_error(
            "Error occurred submitting request: {} with content {}".format(
                e, workflow_json))
        redirect_to("get_access_request", resource_id=resource_id, cn=cn)
    return redirect_to("dataset_read", id=package['id'])
def _send_request_mail(self, data):
    '''Email an access request for a restricted resource.

    Renders the request email, sends it to the resource maintainer, the
    configured site admin address and every admin of the dataset's
    owning organization (one mail each — the mailer cannot address
    several recipients at once), then forwards a link-free copy to the
    requesting user.

    :param data: dict with the request details (package_name,
        resource_id, resource_name, the requesting user's
        id/name/email, the maintainer's name/email and a free-text
        message).
    :returns: SEND_SUCCESS if all mails were sent, SEND_FAILED
        otherwise.
    '''
    try:
        dataset_name = data['package_name']
        resource_id = data['resource_id']
        context = {
            'model': model,
            'session': model.Session,
            'ignore_auth': True
        }
        dataset = toolkit.get_action('package_show')(context, {
            'id': dataset_name
        })
        # Build type-aware resource routes, e.g. "dataset_resource.read".
        resource_link = toolkit.url_for('{}_resource.read'.format(
            dataset['type']),
                                        id=dataset_name,
                                        resource_id=resource_id)
        resource_edit_link = toolkit.url_for('{}_resource.edit'.format(
            dataset['type']),
                                             id=dataset_name,
                                             resource_id=resource_id)
        extra_vars = {
            'site_title': config.get('ckan.site_title'),
            'site_url': config.get('ckan.site_url'),
            'maintainer_name': data.get('maintainer_name', 'Maintainer'),
            'user_id': data.get('user_id', 'the user id'),
            'user_name': data.get('user_name', ''),
            'user_email': data.get('user_email', ''),
            'resource_name': data.get('resource_name', ''),
            'resource_link': config.get('ckan.site_url') + resource_link,
            'resource_edit_link':
                config.get('ckan.site_url') + resource_edit_link,
            # NOTE(review): filled from resource_name, not package_name —
            # looks like a copy/paste slip; confirm against the template.
            'package_name': data.get('resource_name', ''),
            'message': data.get('message', ''),
            'admin_email_to': config.get('email_to', 'email_to_undefined')
        }
        body = render_jinja2(
            'restricted/emails/restricted_access_request.txt', extra_vars)
        subject = \
            _('Access Request to resource {0} ({1}) from {2}').format(
                data.get('resource_name', ''),
                data.get('package_name', ''),
                data.get('user_name', ''))
        # recipient email -> display name
        email_dict = {
            data.get('maintainer_email'):
                extra_vars.get('maintainer_name'),
            extra_vars.get('admin_email_to'):
                '{} Admin'.format(extra_vars.get('site_title'))
        }
        dataset_org = toolkit.get_action('organization_show')(
            context, {
                'id': dataset['owner_org'],
                'include_users': True
            })
        dataset_org_admin_ids = [
            user['id'] for user in dataset_org['users']
            if user['capacity'] == 'admin'
        ]
        # fetch users directly from db to get non-hashed emails
        dataset_org_admins = model.Session.query(model.User).filter(
            model.User.id.in_(dataset_org_admin_ids),
            model.User.email.isnot(None)).all()
        email_dict.update(
            {user.email: user.name
             for user in dataset_org_admins})
        headers = {
            'CC': ",".join(email_dict.keys()),
            'reply-to': data.get('user_email')
        }
        # CC doesn't work and mailer cannot send to multiple addresses.
        # Fix: .items() instead of Python-2-only .iteritems(), matching
        # the module-level helper in this file and keeping py3 compat.
        for email, name in email_dict.items():
            mailer.mail_recipient(name, email, subject, body,
                                  headers=headers)
        # Special copy for the user (no links)
        email = data.get('user_email')
        name = data.get('user_name', 'User')
        extra_vars['resource_link'] = '[...]'
        extra_vars['resource_edit_link'] = '[...]'
        body = render_jinja2(
            'restricted/emails/restricted_access_request.txt', extra_vars)
        body_user = _('Please find below a copy of the access '
                      'request mail sent. \n\n >> {}').format(
                          body.replace("\n", "\n >> "))
        mailer.mail_recipient(name, email, 'Fwd: ' + subject, body_user,
                              headers=headers)
        return SEND_SUCCESS
    except mailer.MailerException as mailer_exception:
        log.error('Can not access request mail after registration.')
        log.error(mailer_exception)
    except Exception:
        # Fix: the original passed the exception as a stray format arg
        # to log.exception, which trips the logging formatter;
        # log.exception already records the traceback.
        log.exception("Failed to prepare the request email.")
    return SEND_FAILED
def unban_user(self, user_id):
    '''Sysadmin-only: lift the ban on *user_id* and return to the list.'''
    user = c.userobj
    if not user or not user.sysadmin:
        # 404 rather than 403 so the endpoint is not discoverable
        abort(404)
    BannedUser.unban(user_id)
    flash_success(tk._('User was successfully unbanned'))
    banned_list_url = tk.url_for('forum_banned_users')
    return tk.redirect_to(banned_list_url)
def test_create_user_via_get_shows_dgu_register_page(self):
    '''GET on the user register route shows the DGU support link.'''
    app = self._get_test_app()
    register_url = url_for(controller='user', action='register')
    response = app.get(url=register_url, status=200)
    assert 'https://data.gov.uk/support' in response
def get_url(self):
    '''Return the URL of the georss dataset feed.'''
    feed_url = toolkit.url_for('mapaction_georss_dataset')
    return feed_url
def _send_request_mail(self, data):
    '''Email an access request for a restricted resource and persist it.

    Renders the (authorized or unauthorized) request template, mails
    the resource maintainer, forwards a link-free copy to the
    requesting user, then saves the request via
    ext_logic.save_restricted_request regardless of mail success.

    :param data: dict with resource_id, resource_name, pkg_dict, the
        requesting user's details, the maintainer's email and a
        free-text message; 'user_id' present iff the user is logged in.
    :returns: True if the mails were sent, False on MailerException.
    '''
    success = False
    try:
        resource_link = toolkit.url_for(
            action='resource_read',
            controller='package',
            id=data.get('package_name'),
            resource_id=data.get('resource_id'))
        resource_edit_link = toolkit.url_for(
            action='resource_edit',
            controller='package',
            id=data.get('package_name'),
            resource_id=data.get('resource_id'))
        dashboard_restricted = (config.get('ckan.site_url') +
                                '/dashboard/restricted')
        extra_vars = {
            'site_title': config.get('ckan.site_title'),
            'site_url': config.get('ckan.site_url'),
            'maintainer_name': data.get('maintainer_name', 'Maintainer'),
            'user_id': data.get('user_id', 'the user id'),
            'user_name': data.get('user_name', ''),
            'user_email': data.get('user_email', ''),
            'resource_name': data.get('resource_name', ''),
            'resource_link': config.get('ckan.site_url') + resource_link,
            'resource_edit_link':
                config.get('ckan.site_url') + resource_edit_link,
            'package_name': data.get('pkg_dict').get('title', ''),
            'message': data.get('message', ''),
            'dashboard_restricted': dashboard_restricted,
            'admin_email_to': config.get('email_to', 'email_to_undefined')}
        # authorized or not users: pick the matching template
        if 'user_id' in data:
            body = render_jinja2(
                'restricted/emails/restricted_access_request.txt',
                extra_vars)
        else:
            body = render_jinja2(
                'restricted/emails/restricted_access_unauth_request.txt',
                extra_vars)
        subject = \
            _('Αίτημα πρόσβασης στο {0} από τον χρήστη {1}').format(
                data.get('resource_name', ''),
                data.get('user_name', data.get('user_email')))
        # recipient email -> display name
        email_dict = {
            data.get('maintainer_email'): extra_vars.get('maintainer_name')
            #extra_vars.get('admin_email_to'): '{} Admin'.format(extra_vars.get('site_title'))
        }
        headers = {
            'CC': ",".join(email_dict.keys()),
            'reply-to': data.get('user_email')}
        # CC doesn't work and mailer cannot send to multiple addresses.
        # Fix: .items() instead of Python-2-only .iteritems(), keeping
        # Python 3 compatibility.
        for email, name in email_dict.items():
            mailer.mail_recipient(name, email, subject, body, headers)
        # Special copy for the user (no links)
        email = data.get('user_email')
        name = data.get('user_name', 'User')
        extra_vars['resource_link'] = '[...]'
        extra_vars['resource_edit_link'] = '[...]'
        if 'user_id' in data:
            body = render_jinja2(
                'restricted/emails/restricted_access_request.txt',
                extra_vars)
        else:
            body = render_jinja2(
                'restricted/emails/restricted_access_unauth_request.txt',
                extra_vars)
        body_user = _(
            'Παρακαλώ δείτε παρακάτω ένα αντίγραφο του αίτηματος πρόσβασης '
            'που στάλθηκε. \n\n >> {}'
        ).format(body.replace("\n", "\n >> "))
        mailer.mail_recipient(
            name, email, 'Fwd: ' + subject, body_user, headers)
        success = True
    except mailer.MailerException as mailer_exception:
        log.error('Can not access request mail after registration.')
        log.error(mailer_exception)
    # save request to the database (even when mailing failed)
    if 'user_id' in data:
        request_dict = {'resource_id': data.get('resource_id'),
                        'message': data.get('message'),
                        'owner_id':
                            data.get('pkg_dict').get('creator_user_id'),
                        'user_id': data.get('user_name')}
    else:
        request_dict = {'resource_id': data.get('resource_id'),
                        'message': data.get('message'),
                        'owner_id':
                            data.get('pkg_dict').get('creator_user_id'),
                        'request_email': data.get('user_email')}
    ext_logic.save_restricted_request(request_dict)
    return success