def test_project_kpi_range_valid(self):
    """Test PROJECT valid kpi range thresholds."""
    self.register()
    self.signin()
    configs = {
        'WTF_CSRF_ENABLED': True,
        'PRODUCTS_SUBPRODUCTS': {
            'north': ['winterfell'],
            'west': ['westeros']
        }
    }
    with patch.dict(self.flask_app.config, configs):
        url = '/project/new'

        # Boundary check: kpi at the minimum allowed value (0.1).
        min_kpi_project = dict(name='kpimin', short_name='kpimin',
                               long_description='kpimin', password='******',
                               product='north', subproduct='winterfell',
                               kpi=0.1, input_data_class='L4 - public',
                               output_data_class='L4 - public')
        csrf_token = self.get_csrf(url)
        res = self.app_post_json(url, headers={'X-CSRFToken': csrf_token},
                                 data=min_kpi_project)
        res_data = json.loads(res.data)
        assert res_data.get('status') == SUCCESS, res_data
        stored = project_repo.get(1)
        assert stored.info['kpi'] == min_kpi_project['kpi'], 'kpi is valid'

        # Boundary check: kpi at the maximum allowed value (120).
        max_kpi_project = dict(name='kpimax', short_name='kpimax',
                               long_description='kpimax', password='******',
                               product='north', subproduct='winterfell',
                               kpi=120, input_data_class='L4 - public',
                               output_data_class='L4 - public')
        csrf_token = self.get_csrf(url)
        res = self.app_post_json(url, headers={'X-CSRFToken': csrf_token},
                                 data=max_kpi_project)
        res_data = json.loads(res.data)
        assert res_data.get('status') == SUCCESS, res_data
        stored = project_repo.get(2)
        assert stored.info['kpi'] == max_kpi_project['kpi'], 'kpi is valid'
def test_clone_project_copy_assigned_users(self):
    """Cloning with copy_users=True keeps assigned users, rewrites the
    task presenter URLs, and drops quiz/enrichment settings."""
    from pybossa.view.projects import data_access_levels

    admin = UserFactory.create()
    user2 = UserFactory.create()
    assign_users = [admin.id, user2.id]
    task_presenter = 'test; url="project/oldname/" pybossa.run("oldname"); test;'
    project = ProjectFactory.create(
        id=40, short_name='oldname',
        info={'task_presenter': task_presenter,
              'quiz': {'test': 123},
              'enrichments': [{'test': 123}],
              'project_users': assign_users,
              'passwd_hash': 'testpass',
              'ext_config': {'test': 123}},
        owner=admin)

    with patch.dict(data_access_levels, self.patch_data_access_levels):
        payload = {'short_name': 'newproj', 'name': 'newproj',
                   'password': '******', 'copy_users': True,
                   'input_data_class': 'L4 - public',
                   'output_data_class': 'L4 - public'}
        url = '/project/%s/clone?api_key=%s' % (project.short_name,
                                                project.owner.api_key)
        res = self.app.post(url, data=payload)

        new_project = project_repo.get(1)
        old_project = project_repo.get(40)
        expected_presenter = 'test; url="project/newproj/" pybossa.run("newproj"); test;'
        # Original keeps its own password hash.
        assert old_project.info['passwd_hash'] == 'testpass', old_project.info['passwd_hash']
        # Assigned users are copied to the clone.
        assert new_project.get_project_users() == assign_users, new_project.get_project_users()
        # Presenter URLs are rewritten to the new short name.
        assert new_project.info['task_presenter'] == expected_presenter, new_project.info['task_presenter']
        # Quiz and enrichments do not carry over; ext_config does.
        assert new_project.info.get('enrichments') is None, new_project.info.get('enrichments')
        assert new_project.info.get('quiz') is None, new_project.info.get('quiz')
        assert new_project.info.get('ext_config') == {'test': 123}, new_project.info.get('ext_config')
        assert new_project.owner_id == admin.id, new_project.owner_id
        assert new_project.owners_ids == [admin.id], new_project.owners_ids
def test_warn_project_excludes_completed_projects(self, clean_mock):
    """Test JOB email excludes completed projects.

    Two stale projects are created: one fully completed and one with
    ongoing work. Only the owner of the non-completed project should
    receive a warning email.
    """
    from pybossa.core import mail
    with mail.record_messages() as outbox:
        date = '2010-10-22T11:02:00.000000'
        owner = UserFactory.create(consent=True, subscribed=True)

        # Stale project whose tasks are completed: must NOT be warned.
        project = ProjectFactory.create(updated=date, contacted=False,
                                        owner=owner)
        TaskFactory.create(created=date, project=project, state='completed')
        project_id = project.id
        project = project_repo.get(project_id)
        project.updated = date
        project_repo.update(project)

        # Stale project with ongoing tasks: must be warned.
        project = ProjectFactory.create(updated=date, contacted=False,
                                        owner=owner)
        TaskFactory.create(created=date, project=project, state='ongoing')
        project_id = project.id
        project = project_repo.get(project_id)
        project.updated = date
        project_repo.update(project)

        warn_old_project_owners()

        # Only the ongoing project generated an email.
        assert len(outbox) == 1, outbox
        subject = 'Your PYBOSSA project: %s has been inactive' % project.name
        assert outbox[0].subject == subject

        # Re-fetch so assertions see persisted state (matches the other
        # warn_old_project_owners tests).
        project = project_repo.get(project_id)
        err_msg = "project.contacted field should be True"
        assert project.contacted, err_msg
        err_msg = "project.published field should be False"
        assert project.published is False, err_msg
        # assert_called_with raises by itself on mismatch; the original
        # appended ", err_msg" which only built a discarded tuple.
        clean_mock.assert_called_with(project_id)
        err_msg = "The update date should be different"
        assert project.updated != date, err_msg
def test_warn_project_excludes_completed_projects(self, clean_mock):
    """Test JOB email excludes completed projects.

    One completed and one ongoing stale project are created; only the
    ongoing project's owner should be emailed.
    """
    from pybossa.core import mail
    with mail.record_messages() as outbox:
        date = '2010-10-22T11:02:00.000000'

        # Stale project whose tasks are completed: must NOT be warned.
        project = ProjectFactory.create(updated=date, contacted=False)
        TaskFactory.create(created=date, project=project, state='completed')
        project_id = project.id
        project = project_repo.get(project_id)
        project.updated = date
        project_repo.update(project)

        # Stale project with ongoing tasks: must be warned.
        project = ProjectFactory.create(updated=date, contacted=False)
        TaskFactory.create(created=date, project=project, state='ongoing')
        project_id = project.id
        project = project_repo.get(project_id)
        project.updated = date
        project_repo.update(project)

        warn_old_project_owners()

        # Only the ongoing project generated an email.
        assert len(outbox) == 1, outbox
        subject = 'Your PyBossa project: %s has been inactive' % project.name
        assert outbox[0].subject == subject

        # Re-fetch so assertions see persisted state.
        project = project_repo.get(project_id)
        err_msg = "project.contacted field should be True"
        assert project.contacted, err_msg
        err_msg = "project.published field should be False"
        assert project.published is False, err_msg
        # assert_called_with raises by itself on mismatch; the original
        # appended ", err_msg" which only built a discarded tuple.
        clean_mock.assert_called_with(project_id)
        err_msg = "The update date should be different"
        assert project.updated != date, err_msg
def test_project_prod_subp_kpi(self):
    """Test PROJECT new/update works for product, subproduct, kpi."""
    self.register()
    self.signin()
    configs = {
        'WTF_CSRF_ENABLED': True,
        'PRODUCTS_SUBPRODUCTS': {
            'north': ['winterfell'],
            'west': ['westeros']
        }
    }
    with patch.dict(self.flask_app.config, configs):
        # Create a new project with valid product/subproduct/kpi.
        url = '/project/new'
        form_data = dict(name='greatwar', short_name='gr8w',
                         long_description='great war', password='******',
                         product='north', subproduct='winterfell', kpi=1,
                         input_data_class='L4 - public',
                         output_data_class='L4 - public')
        csrf = self.get_csrf(url)
        res = self.app_post_json(url, headers={'X-CSRFToken': csrf},
                                 data=form_data)
        res_data = json.loads(res.data)
        assert res_data.get('status') == SUCCESS, res_data
        stored = project_repo.get(1)
        assert stored.info['product'] == form_data['product'], 'product has not been set as expected'
        assert stored.info['subproduct'] == form_data['subproduct'], 'subproduct has not been set as expected'
        assert stored.info['kpi'] == form_data['kpi'], 'kpi has not been set as expected'

        # Update the project with different product/subproduct/kpi.
        url = '/project/%s/update' % form_data['short_name']
        form_data = dict(name='greatwar', description=stored.description,
                         id=stored.id, category_id=stored.category_id,
                         product='west', subproduct='westeros', kpi=2,
                         input_data_class='L4 - public',
                         output_data_class='L4 - public')
        res = self.app_post_json(url, headers={'X-CSRFToken': csrf},
                                 data=form_data)
        res_data = json.loads(res.data)
        assert res_data.get('status') == SUCCESS, res_data
        stored = project_repo.get(1)
        assert stored.info['product'] == form_data['product'], 'product has not been set as expected'
        assert stored.info['subproduct'] == form_data['subproduct'], 'subproduct has not been set as expected'
        assert stored.info['kpi'] == form_data['kpi'], 'kpi has not been set as expected'

        # Invalid product/subproduct/kpi values are rejected.
        url = '/project/new'
        form_data = dict(name='greatwar2', short_name='gr8w2',
                         long_description='great war', password='******',
                         product='wrongp', subproduct='wrongsubp', kpi='aa',
                         input_data_class='L4 - public',
                         output_data_class='L4 - public')
        csrf = self.get_csrf(url)
        res = self.app_post_json(url, headers={'X-CSRFToken': csrf},
                                 data=form_data)
        res_data = json.loads(res.data)
        err_msg = {'kpi': ['This field is required.'],
                   'product': ['Not a valid choice'],
                   'subproduct': ['Not a valid choice']}
        assert res_data.get('errors') and res_data['form']['errors'] == err_msg, res_data
def test_warn_project_owner(self, mail, clean_mock):
    """Test JOB email is sent to warn project owner."""
    # Mock for the send method
    send_mock = MagicMock()
    send_mock.send.return_value = True
    # Mock for the connection method
    connection = MagicMock()
    connection.__enter__.return_value = send_mock
    # Join them
    mail.connect.return_value = connection

    date = '2010-10-22T11:02:00.000000'
    owner = UserFactory.create(consent=True, subscribed=True)
    project = ProjectFactory.create(updated=date, owner=owner)
    project_id = project.id

    warn_old_project_owners()

    project = project_repo.get(project_id)
    err_msg = "mail.connect() should be called"
    assert mail.connect.called, err_msg
    err_msg = "conn.send() should be called"
    assert send_mock.send.called, err_msg
    err_msg = "project.contacted field should be True"
    assert project.contacted, err_msg
    err_msg = "project.published field should be False"
    assert project.published is False, err_msg
    # assert_called_with raises by itself on mismatch; the original code
    # appended ", err_msg", which only built a discarded tuple.
    clean_mock.assert_called_with(project_id)
    err_msg = "The update date should be different"
    assert project.updated != date, err_msg
def test_warn_project_owner_not_consent(self, mail, clean_mock):
    """Test JOB email is not sent to warn project owner as not consent."""
    # Fake SMTP connection: record send() calls without sending anything.
    smtp = MagicMock()
    smtp.send.return_value = True
    conn_ctx = MagicMock()
    conn_ctx.__enter__.return_value = smtp
    mail.connect.return_value = conn_ctx

    date = '2010-10-22T11:02:00.000000'
    # Owner has not consented, so no warning email may be sent.
    owner = UserFactory.create(consent=False, subscribed=True)
    project = ProjectFactory.create(updated=date, owner=owner)
    project_id = project.id

    warn_old_project_owners()

    project = project_repo.get(project_id)
    assert mail.connect.called, "mail.connect() should be called"
    assert smtp.send.called is False, "conn.send() should not be called"
    # Without consent, the project must be left completely untouched.
    assert project.contacted is False, "project.contacted field should be False"
    assert project.published is True, "project.published field should be True"
    assert project.updated == date, "The update date should be different"
def create_webhooks(projectId):
    """Create webhooks for a given project ID."""
    from pybossa.core import project_repo, task_repo, result_repo
    from pybossa.model.webhook import Webhook
    project = project_repo.get(projectId)
    # Only completed tasks should already have fired a webhook.
    tasks = task_repo.filter_tasks_by(state='completed', project_id=projectId)
    print "Analyzing %s tasks" % len(tasks)
    for task in tasks:
        # NOTE(review): the :task_id bind parameter sits inside a quoted
        # JSON literal; confirm SQLAlchemy expands it there as intended.
        sql = text(
            '''select * from webhook where project_id=:project_id and payload@>'{"task_id": :task_id}';''')
        results = db.engine.execute(
            sql, project_id=project.id, task_id=task.id)
        # Any matching row means a webhook already exists for this task;
        # the loop simply keeps the last row seen.
        webhook = None
        for result in results:
            webhook = result
        if not webhook:
            # No webhook recorded: rebuild the payload from the task's
            # result and persist a new webhook entry.
            result = result_repo.get_by(project_id=project.id, task_id=task.id)
            payload = dict(fired_at=None,
                           project_short_name=project.short_name,
                           project_id=project.id,
                           task_id=task.id,
                           result_id=result.id,
                           event='task_completed')
            wh = Webhook(project_id=projectId, payload=payload)
            print(wh)
            db.session.add(wh)
            db.session.commit()
    print "Project %s completed!" % project.short_name
def project_export(id): from pybossa.core import project_repo, json_exporter, csv_exporter app = project_repo.get(id) if app is not None: print "Export project id %d" % id json_exporter.pregenerate_zip_files(app) csv_exporter.pregenerate_zip_files(app)
def _retrieve_new_task(project_id):
    """Return a new task for the current user/IP on the given project.

    Raises NotFound for an unknown project. Returns an error Task when
    the project disallows anonymous contributors; otherwise delegates
    to the project's scheduler.
    """
    project = project_repo.get(project_id)
    if project is None:
        raise NotFound
    if not project.allow_anonymous_contributors and current_user.is_anonymous():
        info = dict(
            error="This project does not allow anonymous contributors")
        error = model.task.Task(info=info)
        return error
    if request.args.get('offset'):
        offset = int(request.args.get('offset'))
    else:
        offset = 0
    user_id = None if current_user.is_anonymous() else current_user.id
    user_ip = request.remote_addr if current_user.is_anonymous() else None
    if request.headers.get('remote_mobile_addr') is not None:
        # Mobile clients report their address in a dedicated header.
        user_ip = request.headers.get('remote_mobile_addr') if current_user.is_anonymous() else None
    elif request.headers.getlist("X-Forwarded-For"):
        user_ip = request.headers.getlist("X-Forwarded-For")[0]
        if ',' in user_ip:
            # NOTE(review): this keeps the LAST address of the chain;
            # the originating client is normally the FIRST entry --
            # confirm which one the scheduler should receive.
            ips = user_ip.split(",")
            for ip in ips:
                user_ip = ip
    # Removed the three Py2 debug "print" statements the original left in
    # this production code path.
    task = sched.new_task(project_id, project.info.get('sched'),
                          user_id, user_ip, offset)
    return task
def test_project_created_with_volume_avatar(self, mock_importer):
    """Test that a project is created with the volume's avatar."""
    self.register()
    self.signin()
    volume = dict(id='123abc', name='My Volume', container='foo',
                  thumbnail='bar.png', thumbnail_url='/foo/bar.png')
    category = CategoryFactory()
    tmpl_fixtures = TemplateFixtures(category)
    tmpl = tmpl_fixtures.create(task_tmpl=tmpl_fixtures.iiif_select_tmpl)
    category.info = dict(presenter='iiif-annotation', volumes=[volume],
                         templates=[tmpl])
    project_repo.update_category(category)

    endpoint = '/lc/projects/{}/new'.format(category.short_name)
    payload = dict(name='foo', short_name='bar', template_id=tmpl['id'],
                   volume_id=volume['id'])
    res = self.app_post_json(endpoint, data=payload)

    project = project_repo.get(1)
    # The avatar details are taken from the chosen volume.
    assert_equal(project.info['container'], volume['container'])
    assert_equal(project.info['thumbnail'], volume['thumbnail'])
    assert_equal(project.info['thumbnail_url'], volume['thumbnail_url'])
def test_z3950_project_creation(self, mock_importer):
    """Test that a Z39.50 project is created."""
    mock_importer.count_tasks_to_import.return_value = 1
    self.register()
    self.signin()
    volume = dict(id='123abc', name='My Volume', importer='z3950',
                  data=dict(album_id=self.flickr_album_id))
    category = CategoryFactory()
    tmpl_fixtures = TemplateFixtures(category)
    tmpl = tmpl_fixtures.create(task_tmpl=tmpl_fixtures.z3950_tmpl)
    category.info = dict(presenter='z3950', volumes=[volume],
                         templates=[tmpl])
    project_repo.update_category(category)

    endpoint = '/lc/projects/{}/new'.format(category.short_name)
    payload = dict(name='foo', short_name='bar', template_id=tmpl['id'],
                   volume_id=volume['id'])
    res = self.app_post_json(endpoint, data=payload)
    res_data = json.loads(res.data)
    msg = 'The project was generated with 1 task.'
    assert_equal(res_data['flash'], msg)

    project = project_repo.get(1)
    # Tasks must be imported with the z3950 importer and volume data.
    expected = call(task_repo, project.id, type='z3950',
                    album_id=self.flickr_album_id)
    assert_equal(mock_importer.create_tasks.call_args_list, [expected])
def test_task_priority_external_uid(self):
    """Test SCHED respects priority_0 field for externa uid"""
    project = ProjectFactory.create(info=dict(sched='depth_first_all'),
                                    owner=UserFactory.create(id=500))
    TaskFactory.create_batch(10, project=project)
    # Without priorities, tasks are served FIFO (ordered by task.id).
    tasks = db.session.query(Task).filter_by(project_id=1).order_by('id').all()
    project = project_repo.get(1)
    headers = self.get_headers_jwt(project)
    url = 'api/project/%s/newtask?external_uid=342' % project.id

    res = self.app.get(url, headers=headers)
    served = json.loads(res.data)
    assert served.get('id') == tasks[0].id, "Task.id should be the same"

    # Promote one random task to the maximum priority.
    import random
    chosen = random.choice(tasks)
    chosen.priority_0 = 1
    db.session.add(chosen)
    db.session.commit()

    # Ordering by priority must now serve the promoted task first.
    res = self.app.get(url + '&orderby=priority_0&desc=true', headers=headers)
    served = json.loads(res.data)
    assert served.get('id') == chosen.id, \
        ("Task.id should be the same", served, chosen)
    assert served.get('priority_0') == 1, "Task.priority_0 should be the 1"
def user_progress(project_id=None, short_name=None):
    """API endpoint for user progress.

    Return a JSON object with three fields regarding the tasks for the user:
        {
            'done': 10,
            'total': 100,
            'remaining': 90
        }
    This will mean that the user has done a 10% of the available tasks for
    him and 90 tasks are yet to be submitted
    """
    # Anonymous users have no per-user progress: require authentication.
    if current_user.is_anonymous():
        return abort(401)
    if project_id or short_name:
        # The project can be addressed by id or by short name.
        if short_name:
            project = project_repo.get_by_shortname(short_name)
        elif project_id:
            project = project_repo.get(project_id)
        if project:
            # For now, keep this version, but wait until redis cache is
            # used here for task_runs too
            query_attrs = dict(project_id=project.id)
            query_attrs['user_id'] = current_user.id
            taskrun_count = task_repo.count_task_runs_with(**query_attrs)
            num_available_tasks = n_available_tasks(project.id,
                                                    current_user.id)
            tmp = dict(done=taskrun_count, total=n_tasks(project.id),
                       remaining=num_available_tasks)
            return Response(json.dumps(tmp), mimetype="application/json")
        else:
            return abort(404)
    else:  # pragma: no cover
        return abort(404)
def user_progress(project_id=None, short_name=None):
    """API endpoint for user progress.

    Return a JSON object with two fields regarding the tasks for the user:
        {
            'done': 10,
            'total': 100
        }
    This will mean that the user has done a 10% of the available tasks for
    him
    """
    if project_id or short_name:
        # The project can be addressed by id or by short name.
        if short_name:
            project = project_repo.get_by_shortname(short_name)
        elif project_id:
            project = project_repo.get(project_id)
        if project:
            # For now, keep this version, but wait until redis cache is
            # used here for task_runs too
            query_attrs = dict(project_id=project.id)
            # Anonymous contributions are tracked by IP, others by user id.
            if current_user.is_anonymous():
                query_attrs['user_ip'] = request.remote_addr or '127.0.0.1'
            else:
                query_attrs['user_id'] = current_user.id
            taskrun_count = task_repo.count_task_runs_with(**query_attrs)
            tmp = dict(done=taskrun_count, total=n_tasks(project.id))
            return Response(json.dumps(tmp), mimetype="application/json")
        else:
            return abort(404)
    else:  # pragma: no cover
        return abort(404)
def test_warn_project_owner(self, mail, clean_mock):
    """Test JOB email is sent to warn project owner."""
    from smtplib import SMTPRecipientsRefused
    from nose.tools import assert_raises

    # The fake connection's send() raises SMTPRecipientsRefused.
    smtp = MagicMock()
    smtp.send.side_effect = SMTPRecipientsRefused('wrong')
    conn_ctx = MagicMock()
    conn_ctx.__enter__.return_value = smtp
    mail.connect.return_value = conn_ctx

    date = '2010-10-22T11:02:00.000000'
    owner = UserFactory.create(consent=True, subscribed=True,
                               email_addr="wrong")
    project = ProjectFactory.create(updated=date, owner=owner)
    project_id = project.id

    # The job reports failure when the recipient is refused.
    assert warn_old_project_owners() is False
    assert_raises(SMTPRecipientsRefused, smtp.send, 'msg')

    project = project_repo.get(project_id)
    assert mail.connect.called, "mail.connect() should be called"
    assert smtp.send.called, "conn.send() should be called"
    # On failure the project state must remain untouched.
    assert project.contacted is False, "project.contacted field should be False"
    assert project.published, "project.published field should be True"
    assert project.updated == date, "The update date should be the same"
def _respond_csv(self, ty, id, info_only=False):
    """Build the project-report CSV: project stats then per-user stats.

    :param ty: not referenced in this method -- presumably kept for the
        shared exporter interface; verify against callers.
    :param id: id of the project to report on.
    :param info_only: not referenced in this method.
    """
    out = tempfile.TemporaryFile()
    writer = UnicodeWriter(out)
    empty_row = []
    p = project_repo.get(id)
    if p is not None:
        # Project-level statistics section.
        project_section = ['Project Statistics']
        project_header = ['Id', 'Name', 'Short Name', 'Total Tasks',
                          'First Task Submission', 'Last Task Submission',
                          'Average Time Spend Per Task', 'Task Redundancy']
        writer.writerow(project_section)
        writer.writerow(project_header)
        project_data = get_project_report_projectdata(id)
        writer.writerow(project_data)
        writer.writerow(empty_row)
        # Per-user statistics section.
        user_section = ['User Statistics']
        user_header = ['Id', 'Name', 'Fullname', 'Email', 'Admin',
                       'Subadmin', 'Enabled', 'Languages', 'Locations',
                       'Start Time', 'End Time', 'Timezone',
                       'Type of User', 'Additional Comments',
                       'Total Tasks Completed', 'Percent Tasks Completed',
                       'First Task Submission', 'Last Task Submission',
                       'Average Time Per Task']
        writer.writerow(user_section)
        users_project_data = get_project_report_userdata(id)
        if users_project_data:
            writer.writerow(user_header)
            for user_data in users_project_data:
                writer.writerow(user_data)
        else:
            # The user header is skipped when there is no user activity.
            writer.writerow(['No user data'])
    return self._get_csv(out, writer)
def send_email_notifications():
    """Notify recent contributors of projects that received new tasks."""
    from pybossa.core import sentinel
    from pybossa.cache import projects as cached_projects
    from pybossa.core import project_repo
    from pybossa.sched import Schedulers
    redis_conn = sentinel.master
    # Hash of project_id -> timestamp recorded when new tasks were added.
    project_set = redis_conn.hgetall('updated_project_ids') or {}
    for project_id, timestamp in project_set.iteritems():
        project = project_repo.get(project_id)
        # Consume the entry regardless of whether an email goes out.
        redis_conn.hdel('updated_project_ids', project_id)
        if not project.email_notif:
            continue
        user_emails = []
        if cached_projects.get_project_scheduler(project_id) == Schedulers.user_pref:
            # user_pref scheduler: contributors matched against tasks
            # added since the recorded timestamp.
            user_emails = user_repo.get_user_pref_recent_contributor_emails(project_id, timestamp)
        else:
            # Other schedulers: only notify while the project is not done.
            if cached_projects.overall_progress(project_id) != 100:
                user_emails = user_repo.get_recent_contributor_emails(project_id)
        if user_emails:
            # De-duplicate recipients while preserving order.
            recipients = []
            for email_addr in user_emails:
                if email_addr not in recipients:
                    recipients.append(email_addr)
            subject = (u'New Tasks have been imported to {}'.format(project.name))
            body = u'Hello,\n\nThere have been new tasks uploaded to the previously finished project, {0}. ' \
                   u'\nLog on to {1} to complete any available tasks.' \
                   .format(project.name, current_app.config.get('BRAND'))
            mail_dict = dict(recipients=recipients, subject=subject, body=body)
            send_mail(mail_dict)
    return True
def test_new_project_task_redundancy_updated(self, mock_importer, mock_update_redundancy):
    """Test task redundancy updated for new projects."""
    self.register(name=Fixtures.name)
    self.signin()
    min_answers = 10
    volume = dict(id='123abc', name='My Volume')
    category = CategoryFactory()
    tmpl_fixtures = TemplateFixtures(category)
    tmpl = tmpl_fixtures.create(task_tmpl=tmpl_fixtures.iiif_select_tmpl)
    # The template carries the redundancy setting for new projects.
    tmpl['min_answers'] = min_answers
    category.info = dict(presenter='iiif-annotation', volumes=[volume],
                         templates=[tmpl])
    project_repo.update_category(category)

    endpoint = '/lc/projects/{}/new'.format(category.short_name)
    payload = dict(name='foo', short_name='bar', template_id=tmpl['id'],
                   volume_id=volume['id'])
    self.app_post_json(endpoint, data=payload)

    project = project_repo.get(1)
    mock_update_redundancy.assert_called_once_with(project, min_answers)
def test_project_created_with_correct_details(self, mock_importer):
    """Test that a project is created with the correct details."""
    self.register(name=Fixtures.name)
    self.signin()
    volume = dict(id='123abc', name='My Volume')
    category = CategoryFactory()
    tmpl_fixtures = TemplateFixtures(category)
    tmpl = tmpl_fixtures.create(task_tmpl=tmpl_fixtures.iiif_select_tmpl)
    category.info = dict(presenter='iiif-annotation', volumes=[volume],
                         templates=[tmpl])
    project_repo.update_category(category)

    endpoint = '/lc/projects/{}/new'.format(category.short_name)
    payload = dict(name='foo', short_name='bar', template_id=tmpl['id'],
                   volume_id=volume['id'])
    res = self.app_post_json(endpoint, data=payload)

    project = project_repo.get(1)
    # The new project mirrors the form data and template settings.
    assert_equal(project.name, payload['name'])
    assert_equal(project.short_name, payload['short_name'])
    assert_equal(project.webhook, 'http://localhost/lc/analysis')
    assert_equal(project.published, True)
    assert_equal(project.description, tmpl['description'])
    assert_dict_equal(project.info, {
        'template_id': tmpl['id'],
        'volume_id': volume['id']
    })
def test_task_priority_external_uid(self):
    """Test SCHED respects priority_0 field for externa uid"""
    # Start from a clean slate of task runs.
    self.create()
    self.del_task_runs()
    # Without priorities the scheduler serves tasks FIFO by task.id.
    tasks = db.session.query(Task).filter_by(project_id=1).order_by('id').all()
    project = project_repo.get(1)
    headers = self.get_headers_jwt(project)
    url = 'api/project/1/newtask?external_uid=342'

    res = self.app.get(url, headers=headers)
    served = json.loads(res.data)
    assert served.get('id') == tasks[0].id, "Task.id should be the same"

    # Promote one random task to the highest priority.
    import random
    promoted = random.choice(tasks)
    promoted.priority_0 = 1
    db.session.add(promoted)
    db.session.commit()

    # The promoted task must now be served first.
    res = self.app.get(url, headers=headers)
    served = json.loads(res.data)
    assert served.get('id') == promoted.id, "Task.id should be the same"
    assert served.get('priority_0') == 1, "Task.priority_0 should be the 1"
def test_project_kpi_two_decimals_valid(self):
    """Test PROJECT valid kpi with 2 decimal places."""
    self.register()
    self.signin()
    configs = {
        'WTF_CSRF_ENABLED': True,
        'PRODUCTS_SUBPRODUCTS': {
            'north': ['winterfell'],
            'west': ['westeros']
        }
    }
    with patch.dict(self.flask_app.config, configs):
        # A kpi with two decimal places passes validation.
        url = '/project/new'
        form_data = dict(name='kpitwodecimals',
                         short_name='kpitwodecimals',
                         long_description='kpitwodecimals',
                         password='******', product='north',
                         subproduct='winterfell', kpi=0.16)
        csrf = self.get_csrf(url)
        res = self.app_post_json(url, headers={'X-CSRFToken': csrf},
                                 data=form_data)
        res_data = json.loads(res.data)
        assert res_data.get('status') == SUCCESS, res_data
        stored = project_repo.get(1)
        assert stored.info['kpi'] == form_data['kpi'], 'kpi is valid'
def task_gold(project_id=None):
    """Make task gold.

    Marks the posted task as a calibration (gold) task and stores its
    gold answers. Only admins and project owners may call this.
    """
    if not current_user.is_authenticated:
        return abort(401)

    project = project_repo.get(project_id)
    if project is None or not (current_user.admin or
                               current_user.id in project.owners_ids):
        raise Forbidden

    task_data = request.json
    task_id = task_data['task_id']
    task = task_repo.get_task(task_id)
    # Guard against an unknown task before touching its attributes: the
    # original read task.project_id on a possibly-None task, producing an
    # AttributeError (HTTP 500) instead of Forbidden.
    if task is None or task.project_id != project_id:
        raise Forbidden

    task.calibration = 1
    task.exported = True
    task.state = 'ongoing'
    preprocess_task_run(project_id, task_id, task_data)
    info = task_data['info']
    if data_access_levels:
        # With data access controls, gold answers are stored externally.
        task.gold_answers = upload_gold_data(task, project_id, info, task_id)
    else:
        task.gold_answers = info
    task_repo.update(task)
    return Response(json.dumps({'success': True}), 200,
                    mimetype="application/json")
def test_warn_project_owner(self, mail, clean_mock):
    """Test JOB email is sent to warn project owner."""
    # Mock for the send method
    send_mock = MagicMock()
    send_mock.send.return_value = True
    # Mock for the connection method
    connection = MagicMock()
    connection.__enter__.return_value = send_mock
    # Join them
    mail.connect.return_value = connection

    date = '2010-10-22T11:02:00.000000'
    project = ProjectFactory.create(updated=date)
    project_id = project.id

    warn_old_project_owners()

    project = project_repo.get(project_id)
    err_msg = "mail.connect() should be called"
    assert mail.connect.called, err_msg
    err_msg = "conn.send() should be called"
    assert send_mock.send.called, err_msg
    err_msg = "project.contacted field should be True"
    assert project.contacted, err_msg
    err_msg = "project.published field should be False"
    assert project.published is False, err_msg
    # assert_called_with raises by itself on mismatch; the original code
    # appended ", err_msg", which only built a discarded tuple.
    clean_mock.assert_called_with(project_id)
    err_msg = "The update date should be different"
    assert project.updated != date, err_msg
def _retrieve_new_task(project_id):
    """Return a new task for the given project, honouring external_uid auth.

    Raises NotFound for an unknown project; returns an error Task when
    anonymous contributions are disallowed; otherwise delegates to the
    project's scheduler.
    """
    project = project_repo.get(project_id)
    if project is None:
        raise NotFound
    if not project.allow_anonymous_contributors and current_user.is_anonymous():
        info = dict(error="This project does not allow anonymous contributors")
        error = model.task.Task(info=info)
        return error
    if request.args.get('external_uid'):
        # External UID requests must carry a valid project JWT; anything
        # other than True is the error response to return as-is.
        resp = jwt_authorize_project(project,
                                     request.headers.get('Authorization'))
        if resp != True:
            return resp
    if request.args.get('offset'):
        offset = int(request.args.get('offset'))
    else:
        offset = 0
    user_id = None if current_user.is_anonymous() else current_user.id
    user_ip = request.remote_addr if current_user.is_anonymous() else None
    external_uid = request.args.get('external_uid')
    task = sched.new_task(project_id, project.info.get('sched'),
                          user_id, user_ip, external_uid, offset)
    return task
def get_autoimport_jobs(queue='high'):
    """Get autoimport jobs.

    Yields one import_tasks job dict per project that has an
    autoimporter configured.
    """
    # Fixed: the docstring was previously placed after the first
    # statement (so it never became __doc__), an error-level log was
    # emitted for a non-error, and large amounts of commented-out debug
    # code cluttered the body.
    from pybossa.core import project_repo

    timeout = current_app.config.get('TIMEOUT')
    # Autoimport is evaluated for every project; the pro-user-only
    # restriction is intentionally not applied here.
    projects = (p.dictize() for p in project_repo.get_all())
    for project_dict in projects:
        project = project_repo.get(project_dict['id'])
        if project.has_autoimporter():
            job = dict(name=import_tasks,
                       args=[project.id, True],
                       kwargs=project.get_autoimporter(),
                       timeout=timeout,
                       queue=queue)
            yield job
def test_project_kpi_range_above_threshold(self):
    """Test PROJECT invalid kpi range above threshold."""
    self.register()
    self.signin()
    configs = {
        'WTF_CSRF_ENABLED': True,
        'PRODUCTS_SUBPRODUCTS': {
            'north': ['winterfell'],
            'west': ['westeros']
        }
    }
    with patch.dict(self.flask_app.config, configs):
        # Invalid kpi above maximum threshold of 120.
        url = '/project/new'
        project = dict(name='kpiabovemax', short_name='kpiabovemax',
                       long_description='kpiabovemax', password='******',
                       product='north', subproduct='winterfell', kpi=121)
        csrf = self.get_csrf(url)
        res = self.app_post_json(url, headers={'X-CSRFToken': csrf},
                                 data=project)
        data = json.loads(res.data)
        assert data.get('status') != SUCCESS, data
        # Removed the dead "proj_repo = project_repo.get(1)" line: the
        # request failed validation, so no project exists to fetch.
        err_msg = {'kpi': ['Number must be between 0.1 and 120.']}
        assert data.get('errors') and data['form']['errors'] == err_msg, data
def test_task_priority_external_uid(self):
    """Test SCHED respects priority_0 field for externa uid"""
    # Reset fixtures and drop any previous task runs.
    self.create()
    self.del_task_runs()
    # Default ordering for unprioritised tasks is FIFO on task.id.
    all_tasks = (db.session.query(Task)
                 .filter_by(project_id=1)
                 .order_by('id')
                 .all())
    project = project_repo.get(1)
    jwt_headers = self.get_headers_jwt(project)
    newtask_url = 'api/project/1/newtask?external_uid=342'

    response = self.app.get(newtask_url, headers=jwt_headers)
    returned = json.loads(response.data)
    assert returned.get('id') == all_tasks[0].id, "Task.id should be the same"

    # Raise the priority of one randomly chosen task to the maximum.
    import random
    target = random.choice(all_tasks)
    target.priority_0 = 1
    db.session.add(target)
    db.session.commit()

    # A fresh request should now serve the prioritised task.
    response = self.app.get(newtask_url, headers=jwt_headers)
    returned = json.loads(response.data)
    assert returned.get('id') == target.id, "Task.id should be the same"
    assert returned.get('priority_0') == 1, "Task.priority_0 should be the 1"
def test_project_kpi_missing_value(self):
    """Test PROJECT missing kpi value."""
    self.register()
    self.signin()
    configs = {
        'WTF_CSRF_ENABLED': True,
        'PRODUCTS_SUBPRODUCTS': {
            'north': ['winterfell'],
            'west': ['westeros']
        }
    }
    with patch.dict(self.flask_app.config, configs):
        # Missing kpi value.
        url = '/project/new'
        project = dict(name='kpiabovemax', short_name='kpiabovemax',
                       long_description='kpiabovemax', password='******',
                       product='north', subproduct='winterfell')
        csrf = self.get_csrf(url)
        res = self.app_post_json(url, headers={'X-CSRFToken': csrf},
                                 data=project)
        data = json.loads(res.data)
        assert data.get('status') != SUCCESS, data
        # Removed the dead "proj_repo = project_repo.get(1)" line: the
        # request failed validation, so no project exists to fetch.
        err_msg = {'kpi': ['This field is required.']}
        assert data.get('errors') and data['form']['errors'] == err_msg, data
def send_email_notifications():
    """Email recent contributors of projects flagged in redis as updated.

    Projects are drained from the 'updated_project_ids' redis set; a
    notification is sent only when the project is not complete and has
    email notifications enabled.
    """
    from pybossa.core import sentinel
    from pybossa.cache import projects as cached_projects
    from pybossa.core import project_repo
    redis_conn = sentinel.master
    pending_ids = redis_conn.smembers('updated_project_ids')
    if pending_ids:
        for project_id in pending_ids:
            project = project_repo.get(project_id)
            if cached_projects.overall_progress(project_id) != 100 and project.email_notif:
                user_emails = user_repo.get_recent_contributor_emails(project_id)
                # Deduplicate while preserving the original order.
                recipients = []
                for addr in (user_emails or []):
                    if addr not in recipients:
                        recipients.append(addr)
                subject = ('New Tasks have been imported to {}'.format(project.name))
                body = "Hello,\n\nThere have been new tasks uploaded to the previously finished project, {0}. " \
                       "\nLog on to {1} to complete any available tasks." \
                       .format(project.name, current_app.config.get('BRAND'))
                mail_dict = dict(recipients=recipients, subject=subject, body=body)
                send_mail(mail_dict)
            # Drop the id so the same update is not reported twice.
            redis_conn.srem('updated_project_ids', project_id)
    return True
def get_service_request(task_id, service_name, major_version, minor_version):
    """Proxy service call.

    Forwards the JSON payload of a POST to the configured proxy service,
    but only when the caller currently holds the lock on the task.
    Returns the proxied response, or aborts with 400 (bad input) / 403
    (no lock, invalid service or payload).
    """
    proxy_service_config = current_app.config.get('PROXY_SERVICE_CONFIG', None)
    task = task_repo.get_task(task_id)
    # Validate BEFORE dereferencing task: the previous version read
    # task.project_id ahead of this check, so an unknown task_id raised
    # AttributeError (HTTP 500) instead of returning 400.
    if not (task and proxy_service_config and service_name and major_version
            and minor_version):
        return abort(400)
    project = project_repo.get(task.project_id)
    timeout = project.info.get('timeout', ContributionsGuard.STAMP_TTL)
    task_locked_by_user = has_lock(task.id, current_user.id, timeout)
    payload = request.json if isinstance(request.json, dict) else None
    if payload and task_locked_by_user:
        service = _get_valid_service(task_id, service_name, payload,
                                     proxy_service_config)
        if isinstance(service, dict):
            url = '{}/{}/{}/{}'.format(proxy_service_config['uri'],
                                       service_name, major_version,
                                       minor_version)
            headers = service.get('headers')
            ret = requests.post(url, headers=headers, json=payload['data'])
            return Response(ret.content, 200, mimetype="application/json")
    current_app.logger.info(
        'Task id {} with lock-status {} by user {} with this payload {} failed.'
        .format(task_id, task_locked_by_user, current_user.id, payload))
    return abort(403)
def _preprocess_post_data(self, data):
    """Validate and normalize a task POST payload in place.

    Raises Conflict for a duplicate task, BadRequest for missing required
    info fields or malformed gold answers. Fills in n_answers from the
    project default when absent and lowercases user preference strings.
    """
    project_id = data["project_id"]
    info = data["info"]
    duplicate = task_repo.find_duplicate(project_id=project_id, info=info)
    if duplicate:
        message = {
            'reason': 'DUPLICATE_TASK',
            'task_id': duplicate
        }
        raise Conflict(json.dumps(message))
    if 'n_answers' not in data:
        project = project_repo.get(project_id)
        data['n_answers'] = project.get_default_n_answers()
    # Lowercase user preference lists so matching is case-insensitive.
    # (Folded from three duplicated branches in the original.)
    user_pref = data.get('user_pref', {})
    for field in ('languages', 'locations', 'assign_user'):
        if user_pref.get(field):
            user_pref[field] = [s.lower() for s in user_pref[field]]
    invalid_fields = validate_required_fields(info)
    if invalid_fields:
        raise BadRequest('Missing or incorrect required fields: {}'
                         .format(','.join(invalid_fields)))
    if data.get('gold_answers'):
        try:
            gold_answers = data['gold_answers']
            # isinstance instead of `type(...) is dict` so dict
            # subclasses are accepted as well.
            if isinstance(gold_answers, dict):
                data['calibration'] = 1
                data['exported'] = True
        except Exception:
            raise BadRequest('Invalid gold_answers')
def project_export(_id):
    """Export project."""
    from pybossa.core import project_repo, json_exporter, csv_exporter
    project = project_repo.get(_id)
    if project is None:
        return
    # Pre-generate both export formats so downloads are instant.
    json_exporter.pregenerate_zip_files(project)
    csv_exporter.pregenerate_zip_files(project)
def _retrieve_new_task(project_id):
    # Look up the project; unknown ids bubble up as a 404.
    project = project_repo.get(project_id)
    if project is None:
        raise NotFound
    # Anonymous users may be rejected depending on project settings.
    if not project.allow_anonymous_contributors and current_user.is_anonymous():
        info = dict(
            error="This project does not allow anonymous contributors")
        return model.task.Task(info=info)
    # External uids must carry a valid project JWT.
    if request.args.get('external_uid'):
        resp = jwt_authorize_project(project,
                                     request.headers.get('Authorization'))
        if resp != True:
            return resp
    offset = int(request.args.get('offset') or 0)
    anonymous = current_user.is_anonymous()
    user_id = None if anonymous else current_user.id
    user_ip = request.remote_addr if anonymous else None
    external_uid = request.args.get('external_uid')
    return sched.new_task(project_id, project.info.get('sched'),
                          user_id, user_ip, external_uid, offset)
def test_create_onesignal(self, mock_queue, mock_onesignal):
    """Test create_onesignal with config works."""
    os_client = MagicMock()
    osdata = {'basic_auth': 'auth', 'id': 1}
    os_client.create_app.return_value = (200, 'OK', osdata)
    mock_onesignal.return_value = os_client
    with patch.dict(self.flask_app.config, {'ONESIGNAL_AUTH_KEY': 'key'}):
        user = UserFactory.create()
        payload = dict(name='name', short_name='short_name',
                       description='desc', long_description='long')
        url = '/api/project?api_key=%s' % user.api_key
        res = self.app.post(url, data=json.dumps(payload))
        assert res.status_code == 200, res.data
        project = json.loads(res.data)
        # Project creation must enqueue the OneSignal app creation job.
        mock_queue.assert_called_with(create_onesignal_app, project['id'])
        res = create_onesignal_app(project['id'])
        assert res[0] == 200, res
        assert res[1] == 'OK', res
        assert res[2]['id'] == 1, res
        # The OneSignal response must be persisted on the project info.
        stored = project_repo.get(project['id'])
        assert stored.info['onesignal'] == osdata, stored.info
        assert stored.info['onesignal_app_id'] == 1, stored.info
def user_progress(project_id=None, short_name=None):
    """API endpoint for user progress.

    Return a JSON object with two fields regarding the tasks for the user:
        { 'done': 10, 'total: 100 }
    This will mean that the user has done a 10% of the available tasks for
    him
    """
    if not (project_id or short_name):  # pragma: no cover
        return abort(404)
    if short_name:
        project = project_repo.get_by_shortname(short_name)
    elif project_id:
        project = project_repo.get(project_id)
    if not project:
        return abort(404)
    # For now, keep this version, but wait until redis cache is
    # used here for task_runs too
    query_attrs = dict(project_id=project.id)
    if current_user.is_anonymous:
        # Anonymous contributions are keyed by (anonymized) IP.
        query_attrs['user_ip'] = anonymizer.ip(request.remote_addr or
                                               '127.0.0.1')
    else:
        query_attrs['user_id'] = current_user.id
    done = task_repo.count_task_runs_with(**query_attrs)
    payload = dict(done=done, total=n_tasks(project.id))
    return Response(json.dumps(payload), mimetype="application/json")
def update(task):
    # Anonymous users can never update a task.
    if current_user.is_anonymous():
        return False
    # Only the project owner or an admin may update.
    app = project_repo.get(task.app_id)
    return app.owner_id == current_user.id or current_user.admin is True
def post(self):
    try:
        payload = json.loads(request.data)
        project = project_repo.get(payload.get("project_id"))
        # Short-circuit taskrun creation for unpublished projects.
        if project is not None and not project.published:
            return json.dumps({"status": "OK"})
    except Exception:
        # Any parsing/lookup failure falls through to the normal flow.
        return super(TaskRunAPI, self).post()
    return super(TaskRunAPI, self).post()
def _retrieve_new_task(project_id):
    # Look up the project; unknown ids bubble up as a 404.
    project = project_repo.get(project_id)
    if project is None:
        raise NotFound
    # Anonymous users may be rejected depending on project settings.
    if not project.allow_anonymous_contributors and current_user.is_anonymous():
        info = dict(
            error="This project does not allow anonymous contributors")
        return [model.task.Task(info=info)]
    # External uids must carry a valid project JWT.
    if request.args.get('external_uid'):
        resp = jwt_authorize_project(project,
                                     request.headers.get('Authorization'))
        if resp != True:
            return resp
    # Query-string knobs with their defaults; limit is capped at 100.
    limit = min(int(request.args.get('limit') or 1), 100)
    offset = int(request.args.get('offset') or 0)
    orderby = request.args.get('orderby') or 'id'
    desc = (fuzzyboolean(request.args.get('desc'))
            if request.args.get('desc') else False)
    anonymous = current_user.is_anonymous()
    user_id = None if anonymous else current_user.id
    user_ip = (anonymizer.ip(request.remote_addr or '127.0.0.1')
               if anonymous else None)
    external_uid = request.args.get('external_uid')
    return sched.new_task(project_id, project.info.get('sched'),
                          user_id, user_ip, external_uid, offset,
                          limit, orderby=orderby, desc=desc)
def test_task_preloading_external_uid(self):
    """Test TASK Pre-loading for external user IDs works"""
    # depth_first_all scheduler so the same user can pre-load via offset.
    project = ProjectFactory.create(info=dict(sched='depth_first_all'),
                                    owner=UserFactory.create(id=500))
    TaskFactory.create_batch(10, project=project)
    assigned_tasks = []
    # Get Task until scheduler returns None
    project = project_repo.get(1)
    headers = self.get_headers_jwt(project)
    url = 'api/project/%s/newtask?external_uid=2xb' % project.id
    res = self.app.get(url, headers=headers)
    task1 = json.loads(res.data)
    # Check that we received a Task
    assert task1.get('id'), task1
    # Pre-load the next task for the user (offset=1 peeks ahead).
    res = self.app.get(url + '&offset=1', headers=headers)
    task2 = json.loads(res.data)
    # Check that we received a Task
    assert task2.get('id'), task2
    # Check that both tasks are different
    assert task1.get('id') != task2.get('id'), "Tasks should be different"
    ## Save the assigned task
    assigned_tasks.append(task1)
    assigned_tasks.append(task2)
    # Submit an Answer for the assigned and pre-loaded task
    for t in assigned_tasks:
        tr = dict(project_id=t['project_id'], task_id=t['id'],
                  info={'answer': 'No'},
                  external_uid='2xb')
        tr = json.dumps(tr)
        res = self.app.post('/api/taskrun?external_uid=2xb',
                            data=tr, headers=headers)
    # Get two tasks again; answered tasks must not be re-served.
    res = self.app.get(url, headers=headers)
    task3 = json.loads(res.data)
    # Check that we received a Task
    assert task3.get('id'), task1
    # Pre-load the next task for the user
    res = self.app.get(url + '&offset=1', headers=headers)
    task4 = json.loads(res.data)
    # Check that we received a Task
    assert task4.get('id'), task2
    # Check that both tasks are different
    assert task3.get('id') != task4.get('id'), "Tasks should be different"
    assert task1.get('id') != task3.get('id'), "Tasks should be different"
    assert task2.get('id') != task4.get('id'), "Tasks should be different"
    # Check that a big offset returns None (empty JSON object).
    res = self.app.get(url + '&offset=11', headers=headers)
    assert json.loads(res.data) == {}, res.data
def _retrieve_new_task(app_id):
    # Look up the app; unknown ids bubble up as a 404.
    app = project_repo.get(app_id)
    if app is None:
        raise NotFound
    offset = int(request.args.get('offset') or 0)
    anonymous = current_user.is_anonymous()
    user_id = None if anonymous else current_user.id
    user_ip = request.remote_addr if anonymous else None
    return sched.new_task(app_id, user_id, user_ip, offset)
def import_tasks(project_id, **form_data):
    """Import tasks for a project."""
    from pybossa.core import project_repo
    project = project_repo.get(project_id)
    msg = importer.create_tasks(task_repo, project_id, **form_data)
    msg = msg + " to your project %s!" % project.name
    # Notify the owner that the import finished.
    subject = "Tasks Import to your project %s" % project.name
    body = "Hello,\n\n" + msg + "\n\nAll the best,\nThe %s team." % current_app.config.get("BRAND")
    send_mail(dict(recipients=[project.owner.email_addr],
                   subject=subject, body=body))
    return msg
def create(taskrun=None):
    """Authorize creation of a taskrun.

    Returns False for anonymous users on projects that disallow them,
    aborts with 403 when the user/IP already answered this task, and
    returns True otherwise.
    """
    task = task_repo.get_task(taskrun.task_id)
    project = project_repo.get(task.app_id)
    if (current_user.is_anonymous() and
            project.allow_anonymous_contributors is False):
        return False
    # A user (or anonymous IP) may answer each task at most once.
    authorized = task_repo.count_task_runs_with(app_id=taskrun.app_id,
                                                task_id=taskrun.task_id,
                                                user_id=taskrun.user_id,
                                                user_ip=taskrun.user_ip) <= 0
    if not authorized:
        # abort() raises the HTTPException itself; the original
        # `raise abort(403)` made the `raise` dead code.
        abort(403)
    return authorized
def import_tasks(tasks_info, app_id):
    """Import tasks into a project and email its owner a summary."""
    from pybossa.core import task_repo, project_repo
    from flask import current_app
    import pybossa.importers as importers
    project = project_repo.get(app_id)
    msg = importers.create_tasks(task_repo, tasks_info, app_id)
    msg = msg + ' to your project %s!' % project.name
    # Notify the owner that the import finished.
    subject = 'Tasks Import to your project %s' % project.name
    body = 'Hello,\n\n' + msg + '\n\nAll the best,\nThe %s team.' % current_app.config.get('BRAND')
    send_mail(dict(recipients=[project.owner.email_addr],
                   subject=subject, body=body))
    return msg
def get_autoimport_jobs(queue='low'):
    """Get autoimport jobs."""
    from pybossa.core import project_repo
    import pybossa.cache.projects as cached_projects
    # Autoimport is a pro-user feature: only their projects are scanned.
    for project_dict in cached_projects.get_from_pro_user():
        project = project_repo.get(project_dict['id'])
        if not project.has_autoimporter():
            continue
        yield dict(name=import_tasks,
                   args=[project.id],
                   kwargs=project.get_autoimporter(),
                   timeout=(10 * MINUTE),
                   queue=queue)
def push_notification(project_id, **kwargs):
    """Send push notification."""
    from pybossa.core import project_repo
    project = project_repo.get(project_id)
    # Only projects with a OneSignal app configured get notifications.
    if not project.info.get('onesignal'):
        return
    app_id = current_app.config.get('ONESIGNAL_APP_ID')
    api_key = current_app.config.get('ONESIGNAL_API_KEY')
    client = PybossaOneSignal(app_id=app_id, api_key=api_key)
    # Target only users tagged with this project id.
    filters = [{"field": "tag", "key": project_id, "relation": "exists"}]
    return client.push_msg(contents=kwargs['contents'],
                           headings=kwargs['headings'],
                           launch_url=kwargs['launch_url'],
                           web_buttons=kwargs['web_buttons'],
                           filters=filters)
def send_weekly_stats_project(project_id):
    """Compose and enqueue the weekly statistics email for a project owner.

    Skips owners who opted out of email updates; refreshes cached stats
    before rendering, then pushes the mail job onto the 'high' queue.
    """
    from pybossa.cache.project_stats import update_stats, get_stats
    from pybossa.core import project_repo
    from datetime import datetime
    project = project_repo.get(project_id)
    if project.owner.subscribed is False:
        return "Owner does not want updates by email"
    # Refresh cached stats so the report reflects the latest week.
    update_stats(project_id)
    dates_stats, hours_stats, users_stats = get_stats(project_id,
                                                      geo=True,
                                                      period='1 week')
    subject = "Weekly Update: %s" % project.name
    timeout = current_app.config.get('TIMEOUT')
    # Max number of completed tasks
    # NOTE(review): indexing zip(...) assumes Python 2 (list result);
    # under Python 3 this needs list(zip(...)) — confirm target version.
    # dates_stats[3]['values'] presumably holds (timestamp, count) pairs;
    # verify against get_stats.
    n_completed_tasks = 0
    xy = zip(*dates_stats[3]['values'])
    n_completed_tasks = max(xy[1])
    # Most active day
    # xy[0] holds timestamps, xy[1] per-day counts; pick the timestamp
    # whose count is maximal. The /1000 implies timestamps are in ms.
    xy = zip(*dates_stats[0]['values'])
    active_day = [xy[0][xy[1].index(max(xy[1]))], max(xy[1])]
    active_day[0] = datetime.fromtimestamp(active_day[0]/1000).strftime('%A')
    # Render both the plaintext (markdown) and HTML versions of the email.
    body = render_template('/account/email/weeklystats.md',
                           project=project,
                           dates_stats=dates_stats,
                           hours_stats=hours_stats,
                           users_stats=users_stats,
                           n_completed_tasks=n_completed_tasks,
                           active_day=active_day,
                           config=current_app.config)
    html = render_template('/account/email/weeklystats.html',
                           project=project,
                           dates_stats=dates_stats,
                           hours_stats=hours_stats,
                           users_stats=users_stats,
                           active_day=active_day,
                           n_completed_tasks=n_completed_tasks,
                           config=current_app.config)
    mail_dict = dict(recipients=[project.owner.email_addr],
                     subject=subject, body=body, html=html)
    # Deliver asynchronously on the high-priority queue.
    job = dict(name=send_mail, args=[mail_dict], kwargs={},
               timeout=timeout, queue='high')
    enqueue_job(job)
def import_tasks(project_id, from_auto=False, **form_data):
    """Import tasks for a project."""
    from pybossa.core import project_repo
    project = project_repo.get(project_id)
    report = importer.create_tasks(task_repo, project_id, **form_data)
    if from_auto:
        # Persist the import metadata so the autoimporter can resume.
        form_data["last_import_meta"] = report.metadata
        project.set_autoimporter(form_data)
        project_repo.save(project)
    msg = report.message + " to your project %s!" % project.name
    # Notify the owner that the import finished.
    subject = "Tasks Import to your project %s" % project.name
    body = "Hello,\n\n" + msg + "\n\nAll the best,\nThe %s team." % current_app.config.get("BRAND")
    send_mail(dict(recipients=[project.owner.email_addr],
                   subject=subject, body=body))
    return msg
def webhook(url, payload=None, oid=None, rerun=False):
    """Post to a webhook.

    Sends `payload` to `url`, records the response on a Webhook row
    (an existing one when `oid` is given, otherwise a new one), emails
    the admins on failure for published projects, and optionally
    publishes an SSE event. Returns the Webhook object.
    """
    from flask import current_app
    from readability.readability import Document
    try:
        import json
        from pybossa.core import sentinel, webhook_repo, project_repo
        project = project_repo.get(payload['project_id'])
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        # Rerun updates an existing webhook row; otherwise create a new one.
        if oid:
            webhook = webhook_repo.get(oid)
        else:
            webhook = Webhook(project_id=payload['project_id'],
                              payload=payload)
        if url:
            params = dict()
            if rerun:
                params['rerun'] = True
            response = requests.post(url, params=params,
                                     data=json.dumps(payload),
                                     headers=headers)
            # Store a readable summary of the response body, not the raw HTML.
            webhook.response = Document(response.text).summary()
            webhook.response_status_code = response.status_code
        else:
            # No URL configured: treat it like a connection failure below.
            raise requests.exceptions.ConnectionError('Not URL')
        if oid:
            webhook_repo.update(webhook)
            webhook = webhook_repo.get(oid)
        else:
            webhook_repo.save(webhook)
    except requests.exceptions.ConnectionError:
        webhook.response = 'Connection Error'
        webhook.response_status_code = None
        webhook_repo.save(webhook)
    finally:
        # Runs on both success and failure paths: alert admins about any
        # non-200 outcome for published projects.
        if project.published and webhook.response_status_code != 200 and current_app.config.get('ADMINS'):
            subject = "Broken: %s webhook failed" % project.name
            body = 'Sorry, but the webhook failed'
            mail_dict = dict(recipients=current_app.config.get('ADMINS'),
                             subject=subject, body=body,
                             html=webhook.response)
            send_mail(mail_dict)
        if current_app.config.get('SSE'):
            publish_channel(sentinel, payload['project_short_name'],
                            data=webhook.dictize(), type='webhook',
                            private=True)
    return webhook
def _retrieve_new_task(project_id):
    # Look up the project; unknown ids bubble up as a 404.
    project = project_repo.get(project_id)
    if project is None:
        raise NotFound
    # Anonymous users may be rejected depending on project settings.
    if not project.allow_anonymous_contributors and current_user.is_anonymous():
        info = dict(
            error="This project does not allow anonymous contributors")
        return model.task.Task(info=info)
    offset = int(request.args.get('offset') or 0)
    anonymous = current_user.is_anonymous()
    user_id = None if anonymous else current_user.id
    user_ip = request.remote_addr if anonymous else None
    return sched.new_task(project_id, project.info.get('sched'),
                          user_id, user_ip, offset)
def featured(project_id=None):
    """List featured projects of PYBOSSA.

    GET renders all projects grouped by category; POST/DELETE toggle the
    `featured` flag of a single project (415 when the flag is already in
    the requested state, 404 for an unknown project).
    """
    try:
        if request.method == 'GET':
            categories = cached_cat.get_all()
            projects = {}
            # Build a {category_short_name: [projects]} mapping, fetching
            # every project of each category in one page.
            for c in categories:
                n_projects = cached_projects.n_count(category=c.short_name)
                projects[c.short_name] = cached_projects.get(
                    category=c.short_name,
                    page=1,
                    per_page=n_projects)
            response = dict(template = '/admin/projects.html',
                            projects=projects,
                            categories=categories,
                            form=dict(csrf=generate_csrf()))
            return handle_content_type(response)
        else:
            project = project_repo.get(project_id)
            if project:
                ensure_authorized_to('update', project)
                if request.method == 'POST':
                    # Featuring an already-featured project is an error.
                    if project.featured is True:
                        msg = "Project.id %s already featured" % project_id
                        return format_error(msg, 415)
                    cached_projects.reset()
                    project.featured = True
                    project_repo.update(project)
                    return json.dumps(project.dictize())
                if request.method == 'DELETE':
                    # Un-featuring a non-featured project is an error.
                    if project.featured is False:
                        msg = 'Project.id %s is not featured' % project_id
                        return format_error(msg, 415)
                    cached_projects.reset()
                    project.featured = False
                    project_repo.update(project)
                    return json.dumps(project.dictize())
            else:
                msg = 'Project.id %s not found' % project_id
                return format_error(msg, 404)
    except Exception as e:  # pragma: no cover
        current_app.logger.error(e)
        return abort(500)
def _file_upload(self, data):
    """Method that must be overriden by the class to allow file uploads for
    only a few classes.

    Returns the form fields as a dict (with media_url/info filled in)
    when a multipart upload was handled, or None when this request is
    not an upload for an allowed class. Raises AttributeError when the
    'file' part is missing.
    """
    cls_name = self.__class__.__name__.lower()
    content_type = 'multipart/form-data'
    if (content_type in request.headers.get('Content-Type') and
            cls_name in self.allowed_classes_upload):
        # Copy the form fields so we can augment them below.
        tmp = dict()
        for key in request.form.keys():
            tmp[key] = request.form[key]
        if isinstance(self, announcement.Announcement):
            # don't check project id for announcements
            ensure_authorized_to('create', self)
            upload_method = current_app.config.get('UPLOAD_METHOD')
            if request.files.get('file') is None:
                raise AttributeError
            _file = request.files['file']
            container = "user_%s" % current_user.id
        else:
            # Authorization is scoped to the target project.
            ensure_authorized_to('create', self.__class__,
                                 project_id=tmp['project_id'])
            project = project_repo.get(tmp['project_id'])
            upload_method = current_app.config.get('UPLOAD_METHOD')
            if request.files.get('file') is None:
                raise AttributeError
            _file = request.files['file']
            # Admins upload into the project owner's container so the
            # file lives with the project, not the admin account.
            if current_user.admin:
                container = "user_%s" % project.owner.id
            else:
                container = "user_%s" % current_user.id
        uploader.upload_file(_file, container=container)
        file_url = get_avatar_url(upload_method, _file.filename, container)
        tmp['media_url'] = file_url
        if tmp.get('info') is None:
            tmp['info'] = dict()
        # Record where the file landed so it can be served/deleted later.
        tmp['info']['container'] = container
        tmp['info']['file_name'] = _file.filename
        return tmp
    else:
        return None
def test_warn_project_owner_two(self, clean_mock):
    """Test JOB email is sent to warn project owner."""
    from pybossa.core import mail
    with mail.record_messages() as outbox:
        stale_date = '2010-10-22T11:02:00.000000'
        project = ProjectFactory.create(updated=stale_date)
        project_id = project.id
        warn_old_project_owners()
        project = project_repo.get(project_id)
        # Exactly one warning email goes out, with the expected subject.
        assert len(outbox) == 1, outbox
        subject = 'Your PyBossa project: %s has been inactive' % project.name
        assert outbox[0].subject == subject
        assert project.contacted, "project.contacted field should be True"
        assert project.published is False, \
            "project.published field should be False"
        clean_mock.assert_called_with(project_id), \
            "cache of project should be cleaned"
        assert project.updated != stale_date, \
            "The update date should be different"
def test_get_last_version(self):
    """Test API result returns always latest version."""
    result = self.create_result()
    project = project_repo.get(result.project_id)
    task = task_repo.get_task(result.task_id)
    # A second answer triggers a new result version.
    task.n_answers = 2
    TaskRunFactory.create(task=task, project=project)
    latest = result_repo.get_by(project_id=project.id)
    assert latest.last_version is True, latest.last_version
    latest_id = latest.id
    # Fetching without the last_version filter returns every version,
    # and only the newest one is flagged as last_version.
    all_versions = result_repo.filter_by(project_id=project.id,
                                         last_version=False)
    assert len(all_versions) == 2, len(all_versions)
    for version in all_versions:
        if version.id == latest_id:
            assert version.last_version is True, version.last_version
        else:
            assert version.last_version is False, version.last_version
def featured(app_id=None):
    """List featured apps of PyBossa

    GET renders all apps grouped by category; POST/DELETE toggle the
    `featured` flag of a single app (415 when the flag is already in the
    requested state, 404 for an unknown app).
    """
    try:
        if request.method == 'GET':
            categories = cached_cat.get_all()
            apps = {}
            # Build a {category_short_name: [apps]} mapping, fetching
            # every app of each category in one page.
            for c in categories:
                n_apps = cached_apps.n_count(category=c.short_name)
                apps[c.short_name] = cached_apps.get(category=c.short_name,
                                                     page=1,
                                                     per_page=n_apps)
            return render_template('/admin/applications.html',
                                   apps=apps,
                                   categories=categories)
        else:
            app = project_repo.get(app_id)
            if app:
                require.app.update(app)
                if request.method == 'POST':
                    # Featuring an already-featured app is an error.
                    if app.featured is True:
                        msg = "App.id %s already featured" % app_id
                        return format_error(msg, 415)
                    cached_apps.reset()
                    app.featured = True
                    project_repo.update(app)
                    return json.dumps(app.dictize())
                if request.method == 'DELETE':
                    # Un-featuring a non-featured app is an error.
                    if app.featured is False:
                        msg = 'App.id %s is not featured' % app_id
                        return format_error(msg, 415)
                    cached_apps.reset()
                    app.featured = False
                    project_repo.update(app)
                    return json.dumps(app.dictize())
            else:
                msg = 'App.id %s not found' % app_id
                return format_error(msg, 404)
    except Exception as e:  # pragma: no cover
        current_app.logger.error(e)
        return abort(500)
def get_autoimport_jobs(queue='low'):
    """Get autoimport jobs."""
    from pybossa.core import project_repo
    import pybossa.cache.projects as cached_projects
    from pybossa.pro_features import ProFeatureHandler
    feature_handler = ProFeatureHandler(current_app.config.get('PRO_FEATURES'))
    timeout = current_app.config.get('TIMEOUT')
    # When autoimport is pro-only, scan only pro users' projects;
    # otherwise scan every project.
    if feature_handler.only_for_pro('autoimporter'):
        candidates = cached_projects.get_from_pro_user()
    else:
        candidates = (p.dictize() for p in project_repo.get_all())
    for project_dict in candidates:
        project = project_repo.get(project_dict['id'])
        if not project.has_autoimporter():
            continue
        yield dict(name=import_tasks,
                   args=[project.id, True],
                   kwargs=project.get_autoimporter(),
                   timeout=timeout,
                   queue=queue)
def test_project_new_auth(self): """Test JSON PROJECT (GET/POST) New works.""" with patch.dict(self.flask_app.config, {'WTF_CSRF_ENABLED': True}): self.register() url = '/project/new' res = self.app_get_json(url, follow_redirects=True) data = json.loads(res.data) keys = sorted(['errors', 'form', 'template', 'title']) assert keys == sorted(data.keys()), data assert data.get('form').get('csrf') is not None, data # With errors and NOT CSRF res = self.app_post_json(url, follow_redirects=True) data = json.loads(res.data) assert data.get('code') == 400, data assert data.get('description') == 'CSRF token missing or incorrect.', data # With errors and CSRF csrf = self.get_csrf(url) print csrf res = self.app_post_json(url, headers={'X-CSRFToken': csrf}) data = json.loads(res.data) assert data.get('errors'), data assert len(data.get('form').get('errors').get('name')) == 1, data assert len(data.get('form').get('errors').get('short_name')) == 1, data assert len(data.get('form').get('errors').get('long_description')) == 1, data # New Project project = dict(name='project1', short_name='project1', long_description='lore ipsum') csrf = self.get_csrf(url) print csrf res = self.app_post_json(url, headers={'X-CSRFToken': csrf}, data=project) data = json.loads(res.data) assert data.get('status') == SUCCESS, data url_next = '/project/%s/update' % project['short_name'] assert data.get('next') == url_next, data db_project = project_repo.get(1) err_msg = "It should be the same project" assert db_project.name == project['name'], err_msg