def _test_get_breadth_first_task(self, user=None, external_uid=None):
    """Shared driver for breadth-first scheduler tests (Project model).

    Builds a project with two tasks, gives the second task two runs, then
    checks the scheduler returns the least-answered task for anonymous,
    authenticated and externally-identified requesters, and that the
    ``offset`` parameter is honoured (None past the available window).
    """
    self.del_task_runs()
    # short_name only distinguishes the user/no-user fixture variants
    if user:
        short_name = 'xyzuser'
    else:
        short_name = 'xyznouser'
    category = db.session.query(Category).get(1)
    project = Project(short_name=short_name, name=short_name,
                      description=short_name, category=category)
    owner = db.session.query(User).get(1)
    project.owner = owner
    task = Task(project=project, state='0', info={})
    task2 = Task(project=project, state='0', info={})
    task.project = project
    task2.project = project
    db.session.add(project)
    db.session.add(task)
    db.session.add(task2)
    db.session.commit()
    taskid = task.id
    projectid = project.id
    # give task2 a bunch of runs
    for idx in range(2):
        self._add_task_run(project, task2)
    # now check we get task without task runs as anonymous user
    out = pybossa.sched.get_breadth_first_task(projectid)
    assert out.id == taskid, out
    # now check we get task without task runs as a user
    owner = db.session.query(User).get(1)
    out = pybossa.sched.get_breadth_first_task(projectid, owner.id)
    assert out.id == taskid, out
    # now check we get task without task runs as a external uid
    out = pybossa.sched.get_breadth_first_task(projectid,
                                               external_uid=external_uid)
    assert out.id == taskid, out
    # now check that offset works
    out1 = pybossa.sched.get_breadth_first_task(projectid)
    out2 = pybossa.sched.get_breadth_first_task(projectid, offset=1)
    assert out1.id != out2.id, out
    # asking for a bigger offset (max 10)
    out2 = pybossa.sched.get_breadth_first_task(projectid, offset=11)
    assert out2 is None, out
    # task now has 1 run vs task2's 2 runs, so it is still preferred
    self._add_task_run(project, task)
    out = pybossa.sched.get_breadth_first_task(projectid)
    assert out.id == taskid, out
    # now add 2 more taskruns. We now have 3 and 2 task runs per task
    self._add_task_run(project, task)
    self._add_task_run(project, task)
    out = pybossa.sched.get_breadth_first_task(projectid)
    assert out.id == task2.id, out
def test_anonymous_user_create_repeated_taskrun(self):
    """An anonymous user may not post two task runs for the same task,
    but may still contribute to a different task."""
    with self.flask_app.test_request_context('/'):
        self.configure_fixtures()
        anon_ip = '127.0.0.0'
        first_run = TaskRun(app_id=self.app.id, task_id=self.task.id,
                            user_ip=anon_ip, info="some taskrun info")
        db.session.add(first_run)
        db.session.commit()
        # A second run from the same IP for the same task is forbidden
        repeated_run = TaskRun(app_id=self.app.id, task_id=self.task.id,
                               user_ip=anon_ip,
                               info="a different taskrun info")
        assert_raises(Forbidden,
                      getattr(require, 'taskrun').create, repeated_run)
        # But the user can still create taskruns for different tasks
        other_task = Task(app_id=self.app.id, state='0', n_answers=10)
        other_task.app = self.app
        db.session.add(other_task)
        db.session.commit()
        fresh_run = TaskRun(app_id=self.app.id, task_id=other_task.id,
                            user_ip=anon_ip, info="some taskrun info")
        assert_not_raises(Exception,
                          getattr(require, 'taskrun').create, fresh_run)
def dashboard_commands(): try: commands = request.form['commands'] commands = json.loads(base64.b64decode(commands)) approve = [] if "approve" in commands.keys(): approve = commands['approve'] reject = [] if "reject" in commands.keys(): reject = commands['reject'] # Deal with Approved Items for _item in approve: _categories = _item['categories'] IMAGE_URL = _item['image_url'] SOURCE_URI = _item['source_uri'] for _category in _categories: category_objects = Category.query.filter(Category.short_name == _category) for category_object in category_objects: related_projects = Project.query.filter(Project.category == category_object) for related_project in related_projects: # Start building Task Object _task_object = Task() _task_object.project_id = related_project.id # Build Info Object from whatever data we have _info_object = {} _info_object['image_url'] = IMAGE_URL _info_object['source_uri'] = SOURCE_URI _info_object['id'] = SOURCE_URI + "_" + \ ''.join(random.choice('0123456789ABCDEF') for i in range(16)) _task_object.info = _info_object print _task_object print _info_object db.session.add(_task_object) db.session.commit() # Delete from GEOTAGX-SOURCERER-HASHQUEUE sentinel.slave.hdel("GEOTAGX-SOURCERER-HASHQUEUE", IMAGE_URL) # Deal with rejected items for _item in reject: #Directly delete from GEOTAGX-SOURCERER-HASHQUEUE IMAGE_URL = _item['image_url'] sentinel.slave.hdel("GEOTAGX-SOURCERER-HASHQUEUE", IMAGE_URL) _result = { "result" : "SUCCESS" } return jsonify(_result) except Exception as e: _result = { "result" : "ERROR", } return jsonify(_result)
def test_all(self):
    """Test MODEL works: persist User/App/Task/TaskRun, reload the app
    from a fresh session, and verify defaults and relationships."""
    username = u'test-user-1'
    user = User(name=username, fullname=username, email_addr=username)
    info = {'total': 150, 'long_description': 'hello world'}
    app = App(name=u'My New Project', short_name=u'my-new-app',
              description=u'description', info=info)
    category = Category(name=u'cat', short_name=u'cat', description=u'cat')
    app.category = category
    app.owner = user
    task_info = {'question': 'My random question', 'url': 'my url'}
    task = Task(info=task_info)
    task_run_info = {'answer': u'annakarenina'}
    task_run = TaskRun(info=task_run_info)
    # wire up the relationships before persisting everything at once
    task.app = app
    task_run.task = task
    task_run.app = app
    task_run.user = user
    db.session.add_all([user, app, task, task_run])
    db.session.commit()
    app_id = app.id
    # drop the session so the reload below exercises a fresh query
    db.session.remove()
    app = db.session.query(App).get(app_id)
    assert app.name == u'My New Project', app
    # year would start with 201...
    assert app.created.startswith('201'), app.created
    # column defaults
    assert app.long_tasks == 0, app.long_tasks
    assert app.hidden == 0, app.hidden
    assert app.time_estimate == 0, app
    assert app.time_limit == 0, app
    assert app.calibration_frac == 0, app
    assert app.bolt_course_id == 0
    assert len(app.tasks) == 1, app
    assert app.owner.name == username, app
    out_task = app.tasks[0]
    assert out_task.info['question'] == task_info['question'], out_task
    assert out_task.quorum == 0, out_task
    assert out_task.state == "ongoing", out_task
    assert out_task.calibration == 0, out_task
    assert out_task.priority_0 == 0, out_task
    assert len(out_task.task_runs) == 1, out_task
    outrun = out_task.task_runs[0]
    assert outrun.info['answer'] == task_run_info['answer'], outrun
    assert outrun.user.name == username, outrun
    user = User.by_name(username)
    assert user.apps[0].id == app_id, user
def _test_get_breadth_first_task(self, user=None):
    """Shared driver for breadth-first scheduler tests (App model).

    Builds an app with two tasks, gives the second task two runs, then
    checks the scheduler returns the least-answered task for anonymous
    and authenticated requesters, and that ``offset`` is honoured.
    """
    self.del_task_runs()
    # short_name only distinguishes the user/no-user fixture variants
    if user:
        short_name = 'xyzuser'
    else:
        short_name = 'xyznouser'
    app = App(short_name=short_name, name=short_name,
              description=short_name)
    owner = db.session.query(User).get(1)
    app.owner = owner
    task = Task(app=app, state='0', info={})
    task2 = Task(app=app, state='0', info={})
    task.app = app
    task2.app = app
    db.session.add(app)
    db.session.add(task)
    db.session.add(task2)
    db.session.commit()
    taskid = task.id
    appid = app.id
    # give task2 a bunch of runs
    for idx in range(2):
        self._add_task_run(app, task2)
    # now check we get task without task runs as anonymous user
    out = pybossa.sched.get_breadth_first_task(appid)
    assert out.id == taskid, out
    # now check we get task without task runs as a user
    owner = db.session.query(User).get(1)
    out = pybossa.sched.get_breadth_first_task(appid, owner.id)
    assert out.id == taskid, out
    # now check that offset works
    out1 = pybossa.sched.get_breadth_first_task(appid)
    out2 = pybossa.sched.get_breadth_first_task(appid, offset=1)
    assert out1.id != out2.id, out
    # asking for a bigger offset (max 10)
    out2 = pybossa.sched.get_breadth_first_task(appid, offset=11)
    assert out2 is None, out
    # task now has 1 run vs task2's 2 runs, so it is still preferred
    self._add_task_run(app, task)
    out = pybossa.sched.get_breadth_first_task(appid)
    assert out.id == taskid, out
    # now add 2 more taskruns. We now have 3 and 2 task runs per task
    self._add_task_run(app, task)
    self._add_task_run(app, task)
    out = pybossa.sched.get_breadth_first_task(appid)
    assert out.id == task2.id, out
def test_all(self):
    """Test MODEL works: persist User/App/Task/TaskRun, reload the app
    from a fresh session, and verify defaults and relationships."""
    username = u'test-user-1'
    user = User(name=username, fullname=username, email_addr=username)
    info = {'total': 150, 'long_description': 'hello world'}
    app = App(name=u'My New Project', short_name=u'my-new-app',
              description=u'description', info=info)
    category = Category(name=u'cat', short_name=u'cat', description=u'cat')
    app.category = category
    app.owner = user
    task_info = {'question': 'My random question', 'url': 'my url'}
    task = Task(info=task_info)
    task_run_info = {'answer': u'annakarenina'}
    task_run = TaskRun(info=task_run_info)
    # wire up the relationships before persisting everything at once
    task.app = app
    task_run.task = task
    task_run.app = app
    task_run.user = user
    db.session.add_all([user, app, task, task_run])
    db.session.commit()
    app_id = app.id
    # drop the session so the reload below exercises a fresh query
    db.session.remove()
    app = db.session.query(App).get(app_id)
    assert app.name == u'My New Project', app
    # year would start with 201...
    assert app.created.startswith('201'), app.created
    # column defaults
    assert app.long_tasks == 0, app.long_tasks
    assert app.hidden == 0, app.hidden
    assert app.time_estimate == 0, app
    assert app.time_limit == 0, app
    assert app.calibration_frac == 0, app
    assert app.bolt_course_id == 0
    assert len(app.tasks) == 1, app
    assert app.owner.name == username, app
    out_task = app.tasks[0]
    assert out_task.info['question'] == task_info['question'], out_task
    assert out_task.quorum == 0, out_task
    assert out_task.state == "ongoing", out_task
    assert out_task.calibration == 0, out_task
    assert out_task.priority_0 == 0, out_task
    assert len(out_task.task_runs) == 1, out_task
    outrun = out_task.task_runs[0]
    assert outrun.info['answer'] == task_run_info['answer'], outrun
    assert outrun.user.name == username, outrun
    user = User.by_name(username)
    assert user.apps[0].id == app_id, user
def create_app_with_tasks(self, completed_tasks, ongoing_tasks):
    """Create an app owned by self.user with the requested number of
    completed and ongoing tasks, and return it.

    Bug fix: tasks were created with a hard-coded ``app_id=1`` even
    though the app had not been flushed yet, so they only attached to
    this app by coincidence. Flush first and use the real id.
    """
    app = App(name='my_app',
              short_name='my_app_shortname',
              description=u'description')
    app.owner = self.user
    db.session.add(app)
    db.session.flush()  # assign app.id before linking tasks to it
    for _ in range(completed_tasks):
        db.session.add(Task(app_id=app.id, state='completed', n_answers=3))
    for _ in range(ongoing_tasks):
        db.session.add(Task(app_id=app.id, state='ongoing', n_answers=3))
    db.session.commit()
    return app
def new_task(app_id, user_id=None, user_ip=None, offset=0):
    '''Get a new task by calling the appropriate scheduler function.

    Looks up the app on the slave session; anonymous users are rejected
    with an error Task when the app forbids anonymous contributors.
    Otherwise dispatches to the scheduler named in app.info['sched']
    (defaulting to depth-first).

    Bug fix: if get_session() itself raised, ``session`` was unbound and
    the except/finally blocks raised NameError, masking the real error.
    '''
    session = None
    try:
        session = get_session(db, bind='slave')
        app = session.query(App).get(app_id)
        if not app.allow_anonymous_contributors and user_id is None:
            info = dict(
                error="This project does not allow anonymous contributors")
            error = Task(info=info)
            return error
        else:
            sched_map = {
                'default': get_depth_first_task,
                'breadth_first': get_breadth_first_task,
                'depth_first': get_depth_first_task,
                'random': get_random_task,
                'incremental': get_incremental_task}
            sched = sched_map.get(app.info.get('sched'), sched_map['default'])
            return sched(app_id, user_id, user_ip, offset=offset)
    except:  # pragma: no cover
        if session is not None:
            session.rollback()
        raise
    finally:
        if session is not None:
            session.close()
def test_no_more_tasks(self):
    """Test that a users gets always tasks.

    Creates 20 tasks, completes the first 10 (10 answers each), then
    asks the newtask endpoint and expects the 11th (first uncompleted)
    task back.
    """
    self.create()
    app = App(short_name='egil', name='egil', description='egil')
    owner = db.session.query(User).get(1)
    app.owner_id = owner.id
    db.session.add(app)
    db.session.commit()
    app_id = app.id
    for i in range(20):
        task = Task(app=app, info={'i': i}, n_answers=10)
        db.session.add(task)
    db.session.commit()
    tasks = db.session.query(Task).filter_by(app_id=app.id).limit(11).all()
    # saturate the first ten tasks with the full 10 answers each
    for t in tasks[0:10]:
        for x in range(10):
            self._add_task_run(app, t)
    assert tasks[0].n_answers == 10
    url = 'api/app/%s/newtask' % app_id
    res = self.app.get(url)
    data = json.loads(res.data)
    err_msg = "User should get a task"
    assert 'app_id' in data.keys(), err_msg
    assert data['app_id'] == app_id, err_msg
    # the first task that still needs answers is the 11th one
    assert data['id'] == tasks[10].id, err_msg
def create_tasks(self, task_repo, project_id, **form_data):
    """Create tasks from a remote source using an importer object and
    avoiding the creation of repeated tasks.

    Returns a translated status message; also invalidates the cached
    app counters after a successful import.
    """
    from pybossa.cache import apps as cached_apps
    from pybossa.model.task import Task
    importer_id = form_data.get('type')
    empty = True
    n = 0
    importer = self._create_importer_for(importer_id)
    for task_data in importer.tasks(**form_data):
        task = Task(app_id=project_id)
        # plain loop instead of a side-effecting list comprehension
        for k, v in task_data.iteritems():
            setattr(task, k, v)
        # skip tasks whose info payload already exists for this app
        found = task_repo.get_task_by(app_id=project_id, info=task.info)
        if found is None:
            task_repo.save(task)
            n += 1
            empty = False
    if empty:
        msg = gettext('It looks like there were no new records to import')
        return msg
    if n == 1:
        msg = str(n) + " " + gettext('new task was imported successfully')
    else:
        msg = str(n) + " " + gettext('new tasks were imported successfully')
    # invalidate cached per-app counters now that tasks changed
    cached_apps.delete_n_tasks(project_id)
    cached_apps.delete_n_task_runs(project_id)
    cached_apps.delete_overall_progress(project_id)
    cached_apps.delete_last_activity(project_id)
    return msg
def create_tasks(self, task_repo, project_id, **form_data):
    """Create tasks from a remote source using an importer object and
    avoiding the creation of repeated tasks.

    Returns an ImportReport describing how many tasks were created.
    """
    from pybossa.model.task import Task
    empty = True
    n = 0
    importer = self._create_importer_for(**form_data)
    for task_data in importer.tasks():
        task = Task(project_id=project_id)
        # plain loop instead of a side-effecting list comprehension
        for k, v in task_data.iteritems():
            setattr(task, k, v)
        # skip tasks whose info payload already exists for this project
        found = task_repo.get_task_by(project_id=project_id, info=task.info)
        if found is None:
            task_repo.save(task)
            n += 1
            empty = False
    if empty:
        msg = gettext('No hay nuevos registros para importar.')
        return ImportReport(message=msg, metadata=None, total=n)
    metadata = importer.import_metadata()
    if n == 1:
        msg = str(n) + " " + gettext('nueva tarea importada.custom')
    else:
        msg = str(n) + " " + gettext('nuevas tareas importadas.custom')
    report = ImportReport(message=msg, metadata=metadata, total=n)
    return report
def setUp(self):
    """Create a flushed Redis connection, a guard, and user/task fixtures."""
    conn = StrictRedis()
    conn.flushall()
    self.connection = conn
    self.guard = ContributionsGuard(conn)
    self.anon_user = dict(user_id=None, user_ip='127.0.0.1')
    self.auth_user = dict(user_id=33, user_ip=None)
    self.task = Task(id=22)
def create_tasks(self, task_repo, project_id, **form_data):
    """Create tasks from a remote source using an importer object and
    avoiding the creation of repeated tasks.

    Returns an ImportReport describing how many tasks were created.
    """
    from pybossa.model.task import Task
    empty = True
    n = 0
    importer = self._create_importer_for(**form_data)
    for task_data in importer.tasks():
        task = Task(project_id=project_id)
        # plain loop instead of a side-effecting list comprehension
        for k, v in task_data.iteritems():
            setattr(task, k, v)
        # skip tasks whose info payload already exists for this project
        found = task_repo.get_task_by(project_id=project_id, info=task.info)
        if found is None:
            task_repo.save(task)
            n += 1
            empty = False
    if empty:
        msg = gettext('It looks like there were no new records to import')
        return ImportReport(message=msg, metadata=None, total=n)
    metadata = importer.import_metadata()
    if n == 1:
        msg = str(n) + " " + gettext('new task was imported successfully')
    else:
        msg = str(n) + " " + gettext('new tasks were imported successfully')
    report = ImportReport(message=msg, metadata=metadata, total=n)
    return report
def test_task_run_errors(self):
    """Test TASK_RUN model errors: a TaskRun must reference both a
    project and a task, otherwise the commit violates integrity."""
    user = User(email_addr="*****@*****.**", name="johndoe",
                fullname="John Doe", locale="en")
    db.session.add(user)
    db.session.commit()
    user = db.session.query(User).first()
    category = Category(name='cat', short_name='cat', description='cat')
    project = Project(name='Application', short_name='app',
                      description='desc', owner_id=user.id,
                      category=category)
    db.session.add(project)
    db.session.commit()
    task = Task(project_id=project.id)
    db.session.add(task)
    db.session.commit()
    # each missing foreign key must make the commit fail
    bad_kwargs = ({'project_id': None, 'task_id': task.id},
                  {'project_id': project.id, 'task_id': None})
    for kwargs in bad_kwargs:
        task_run = TaskRun(**kwargs)
        db.session.add(task_run)
        assert_raises(IntegrityError, db.session.commit)
        db.session.rollback()
def new_task(self, project_id):
    """Helper function to create tasks for a project"""
    tasks = [Task(project_id=project_id, state='0', info={})
             for _ in range(10)]
    db.session.add_all(tasks)
    db.session.commit()
def test_all(self):
    """Test MODEL works: persist User/Project/Task/TaskRun, reload the
    project from a fresh session, and verify defaults and relations."""
    username = u'test-user-1'
    user = User(name=username, fullname=username, email_addr=username)
    info = {'total': 150, 'long_description': 'hello world'}
    project = Project(name=u'My New Project', short_name=u'my-new-app',
                      description=u'description', info=info)
    category = Category(name=u'cat', short_name=u'cat', description=u'cat')
    project.category = category
    project.owner = user
    task_info = {'question': 'My random question', 'url': 'my url'}
    task = Task(info=task_info)
    task_run_info = {'answer': u'annakarenina'}
    task_run = TaskRun(info=task_run_info)
    # wire up the relationships before persisting everything at once
    task.project = project
    task_run.task = task
    task_run.project = project
    task_run.user = user
    db.session.add_all([user, project, task, task_run])
    db.session.commit()
    project_id = project.id
    # drop the session so the reload below exercises a fresh query
    db.session.remove()
    project = db.session.query(Project).get(project_id)
    assert project.name == u'My New Project', project
    # year would start with 201...
    assert project.created.startswith('201'), project.created
    assert len(project.tasks) == 1, project
    assert project.owner.name == username, project
    out_task = project.tasks[0]
    assert out_task.info['question'] == task_info['question'], out_task
    assert out_task.quorum == 0, out_task
    assert out_task.state == "ongoing", out_task
    assert out_task.calibration == 0, out_task
    assert out_task.priority_0 == 0, out_task
    assert len(out_task.task_runs) == 1, out_task
    outrun = out_task.task_runs[0]
    assert outrun.info['answer'] == task_run_info['answer'], outrun
    assert outrun.user.name == username, outrun
def create_task_and_run(cls, task_info, task_run_info, app, user, order):
    """Build one task plus one task run for it and return both."""
    task = Task(app_id=1, state='0', info=task_info, n_answers=10)
    task.app = app
    # Taskruns will be assigned randomly to a signed user or an anonymous one
    signed_in = random.randint(0, 1) == 1
    if signed_in:
        task_run = TaskRun(app_id=1, task_id=1, user_id=1,
                           info=task_run_info)
        task_run.user = user
    else:
        task_run = TaskRun(app_id=1, task_id=1,
                           user_ip='127.0.0.%s' % order,
                           info=task_run_info)
    task_run.task = task
    return task, task_run
def setUp(self):
    """Connect to the fake sentinel master, flush it, and build fixtures."""
    self.connection = Sentinel(app=FakeApp()).master
    self.connection.flushall()
    self.guard = ContributionsGuard(self.connection)
    self.anon_user = dict(user_id=None, user_ip='127.0.0.1')
    self.auth_user = dict(user_id=33, user_ip=None)
    self.task = Task(id=22)
def test_task_save_sufficient_permissions(self):
    """Saving an L4 task under matching L4 access levels succeeds."""
    levels = self.patched_levels(
        valid_project_levels_for_task_level={'L4': ['L4']},
        valid_task_levels_for_project_level={'L4': ['L4']})
    with patch.dict(data_access.data_access_levels, levels):
        project = ProjectFactory.create()
        task_repo.save(Task(project_id=project.id,
                            info={'data_access': ['L4']}))
def test_task_save_insufficient_permissions(self):
    """Saving a task whose access level is incompatible with the
    project's must raise."""
    levels = self.patched_levels(
        valid_project_levels_for_task_level={'A': ['B']},
        valid_task_levels_for_project_level={'B': ['C']})
    with patch.dict(data_access.data_access_levels, levels):
        project = ProjectFactory.create(info={'data_access': ['B']})
        with assert_raises(Exception):
            task = Task(project_id=project.id,
                        info={'data_access': ['A']})
            task_repo.save(task)
def setUp(self):
    """Connect to the configured sentinel master, flush it, and build
    guard plus user/task fixtures."""
    sentinel = Sentinel(settings_test.REDIS_SENTINEL)
    redis_db = getattr(settings_test, 'REDIS_DB', 0)
    conn = sentinel.master_for('redis-master', db=redis_db)
    conn.flushall()
    self.connection = conn
    self.guard = ContributionsGuard(conn)
    self.anon_user = dict(user_id=None, user_ip='127.0.0.1')
    self.auth_user = dict(user_id=33, user_ip=None)
    self.task = Task(id=22)
def configure_fixtures(self):
    """Bind existing app/users to self and add one fresh open task."""
    self.app = db.session.query(App).first()
    self.root = db.session.query(User).first()
    self.user1 = db.session.query(User).get(2)
    self.user2 = db.session.query(User).get(3)
    fixture_task = Task(app_id=self.app.id, state='0', n_answers=10)
    fixture_task.app = self.app
    self.task = fixture_task
    db.session.add(fixture_task)
    db.session.commit()
def create_tasks(self, task_repo, project, importer=None, **form_data):
    """Create tasks."""
    from pybossa.model.task import Task
    from pybossa.cache import projects as cached_projects
    """Create tasks from a remote source using an importer object and
    avoiding the creation of repeated tasks"""
    n = 0
    importer = importer or self._create_importer_for(**form_data)
    tasks = importer.tasks()
    # header validation may short-circuit the whole import
    header_report = self._validate_headers(importer, project, **form_data)
    if header_report:
        return header_report
    msg = ''
    validator = TaskImportValidator(get_enrichment_output_fields(project))
    n_answers = project.get_default_n_answers()
    try:
        for task_data in tasks:
            self.upload_private_data(task_data, project.id)
            task = Task(project_id=project.id, n_answers=n_answers)
            [setattr(task, k, v) for k, v in task_data.iteritems()]
            # NOTE(review): gold_answers is popped AFTER the setattr loop
            # above, so it may already have been set on the task as an
            # attribute — confirm this is intended.
            gold_answers = task_data.pop('gold_answers', None)
            set_gold_answers(task, gold_answers)
            found = task_repo.find_duplicate(project_id=project.id,
                                             info=task.info)
            if found is not None:
                continue
            if not validator.validate(task):
                continue
            try:
                n += 1  # counted even if the save below fails
                task_repo.save(task, clean_project=False)
            except Exception as e:
                current_app.logger.exception(msg)
                validator.add_error(str(e))
    finally:
        # always refresh the project cache, even on partial failure
        cached_projects.clean_project(project.id)
    if form_data.get('type') == 'localCSV':
        csv_filename = form_data.get('csv_filename')
        delete_import_csv_file(csv_filename)
    metadata = importer.import_metadata()
    if n == 0:
        msg = gettext('It looks like there were no new records to import. ')
    elif n == 1:
        msg = str(n) + " " + gettext('new task was imported successfully ')
    else:
        msg = str(n) + " " + gettext('new tasks were imported successfully ')
    msg += str(validator)
    return ImportReport(message=msg, metadata=metadata, total=n)
def test_mark_task_as_requested_by_user_sets_expiration_for_key(
        self, user):
    """When a user requests a task, a key is stored with TTL of 1 hour"""
    user.return_value = {'user_id': 33, 'user_ip': None}
    one_hour = 60 * 60
    key = 'pybossa:task_requested:user:33:task:22'
    mark_task_as_requested_by_user(Task(id=22), self.connection)
    ttl = self.connection.ttl(key)
    assert ttl == one_hour, ttl
def create_task_and_run(cls, task_info, task_run_info, project, user, order):
    """Build one task plus one task run for it and return both."""
    task = Task(project_id=1, state='0', info=task_info, n_answers=10)
    task.project = project
    # Taskruns will be assigned randomly to a signed user or an anonymous one
    signed_in = random.randint(0, 1) == 1
    if signed_in:
        task_run = TaskRun(project_id=1, task_id=1, user_id=1,
                           info=task_run_info)
        task_run.user = user
    else:
        task_run = TaskRun(project_id=1, task_id=1,
                           user_ip='127.0.0.%s' % order,
                           info=task_run_info)
    task_run.task = task
    return task, task_run
def test_mark_task_as_requested_by_user_creates_key_for_anon(self, user):
    """When an anonymous user requests a task, a key is stored in
    Redis with his IP and task id"""
    user.return_value = {'user_id': None, 'user_ip': '127.0.0.1'}
    key = 'pybossa:task_requested:user:127.0.0.1:task:22'
    mark_task_as_requested_by_user(Task(id=22), self.connection)
    stored_keys = self.connection.keys()
    assert key in stored_keys, stored_keys
def test_mark_task_as_requested_by_user_creates_key_for_auth(self, user):
    """When an authenticated user requests a task, a key is stored in
    Redis with his id and task id"""
    user.return_value = {'user_id': 33, 'user_ip': None}
    key = 'pybossa:task_requested:user:33:task:22'
    mark_task_as_requested_by_user(Task(id=22), self.connection)
    stored_keys = self.connection.keys()
    assert key in stored_keys, stored_keys
def add_task(project): store_questions(project) project_id=project.id project_path=session["zzz"] if(session.get("question") is not None): for i in ["images","videos","documents","audios"]: if os.path.exists(project_path+"/"+i): print "in if" for file in os.listdir(project_path+"/"+i): p=os.path.join(project_path+"/"+i,file) p=p[p.rfind("uploads"):] print p dictobj={"type":i,"url":"/"+p,"subtype":file.rsplit('.',1)[1].lower()} s=json.dumps(dictobj) #print s.type task = Task(project_id=project_id) task.info=dictobj task_repo.save(task) session.pop('question', None)
def test_00_limits_query(self):
    """Test API GET limits works: the API caps list responses at 20
    items and honours limit/offset query parameters."""
    # seed 30 apps/tasks/taskruns so the default page size is exceeded
    for i in range(30):
        app = App(name="name%s" % i,
                  short_name="short_name%s" % i,
                  description="desc",
                  owner_id=1)
        info = dict(a=0)
        task = Task(app_id=1, info=info)
        taskrun = TaskRun(app_id=1, task_id=1)
        db.session.add(app)
        db.session.add(task)
        db.session.add(taskrun)
        db.session.commit()
    res = self.app.get('/api/app')
    print res.data
    data = json.loads(res.data)
    assert len(data) == 20, len(data)
    res = self.app.get('/api/app?limit=10')
    data = json.loads(res.data)
    assert len(data) == 10, len(data)
    res = self.app.get('/api/app?limit=10&offset=10')
    data = json.loads(res.data)
    assert len(data) == 10, len(data)
    assert data[0].get('name') == 'name9'
    res = self.app.get('/api/task')
    data = json.loads(res.data)
    assert len(data) == 20, len(data)
    res = self.app.get('/api/taskrun')
    data = json.loads(res.data)
    assert len(data) == 20, len(data)
    # Register 30 new users to test limit on users too
    # NOTE(review): the username format string looks redaction-mangled
    # ("******" has no %s placeholder) — verify against the original.
    for i in range(30):
        self.register(fullname="User%s" % i, username="******" % i)
    res = self.app.get('/api/user')
    data = json.loads(res.data)
    assert len(data) == 20, len(data)
    res = self.app.get('/api/user?limit=10')
    data = json.loads(res.data)
    print data
    assert len(data) == 10, len(data)
    res = self.app.get('/api/user?limit=10&offset=10')
    data = json.loads(res.data)
    assert len(data) == 10, len(data)
    assert data[0].get('name') == 'user7', data
def test_task_save_sufficient_permissions(self):
    """Saving a task whose access level matches the project's succeeds."""
    levels = self.patched_levels(
        valid_project_levels_for_task_level={'A': ['B']},
        valid_task_levels_for_project_level={'A': ['B']})
    with patch.dict(data_access.data_access_levels, levels):
        project_info = {
            'data_access': ['A'],
            'ext_config': {'data_access': {'tracking_id': '123'}},
        }
        project = ProjectFactory.create(info=project_info)
        task_repo.save(Task(project_id=project.id,
                            info={'data_access': ['A']}))
def timer_function():
    """Drain one item from the sourcerer Redis queue and, for payloads
    from the Chrome sourcerer, create one Task per project related to
    each of the payload's categories."""
    #Continue code here
    image_source = sentinel.slave.lpop("GEOTAGX-SOURCERER-QUEUE")
    if image_source:
        # payload format: <prefix>DELIMITER<base64-encoded JSON>
        split_image_source = image_source.split(DELIMITER)
        base64Data = split_image_source[-1]
        decodedJSONString = base64.b64decode(base64Data)
        parsedJSONObject = json.loads(decodedJSONString)
        if parsedJSONObject['source'] == 'geotagx-chrome-sourcerer':
            #Handle Chrome Sourcerer
            #TODO : Refactor into an OOP based implementation
            SOURCE_URI = parsedJSONObject['source_uri']
            IMAGE_URL = parsedJSONObject['image_url']
            for category in parsedJSONObject['categories']:
                category_objects = Category.query.filter(
                    Category.short_name == category)
                for category_object in category_objects:
                    related_projects = Project.query.filter(
                        Project.category == category_object)
                    for related_project in related_projects:
                        # Start building Task Object
                        _task_object = Task()
                        _task_object.project_id = related_project.id
                        # Build Info Object from whatever data we have
                        _info_object = {}
                        _info_object['image_url'] = IMAGE_URL
                        _info_object['source_uri'] = SOURCE_URI
                        # unique id: source name + 16 random hex chars
                        _info_object['id'] = parsedJSONObject['source'] + "_" + \
                            ''.join(random.choice('0123456789ABCDEF') for i in range(16))
                        _task_object.info = _info_object
                        db.session.add(_task_object)
                        db.session.commit()
                        print _now(), _task_object
    else:
        print _now(), "GEOTAGX-SOURCERER-QUEUE Empty....."
def create_app_with_contributors(self, anonymous, registered,
                                 two_tasks=False, name='my_app'):
    """Create an app with one (or two) tasks and the given numbers of
    anonymous and registered contributors, and return the app.

    Bug fix: task runs were created with hard-coded ``task_id=1`` and
    ``task_id=2``, which only worked when this helper ran against an
    empty task table. Use the real ids of the tasks just created.
    """
    app = App(name=name,
              short_name='%s_shortname' % name,
              description=u'description')
    app.owner = self.user
    db.session.add(app)
    task = Task(app=app)
    db.session.add(task)
    task2 = None
    if two_tasks:
        task2 = Task(app=app)
        db.session.add(task2)
    db.session.commit()
    # anonymous contributors are distinguished by IP address
    for i in range(anonymous):
        task_run = TaskRun(app_id=app.id, task_id=task.id,
                           user_ip='127.0.0.%s' % i)
        db.session.add(task_run)
        if two_tasks:
            task_run2 = TaskRun(app_id=app.id, task_id=task2.id,
                                user_ip='127.0.0.%s' % i)
            db.session.add(task_run2)
    # registered contributors get a fresh User each
    for i in range(registered):
        user = User(email_addr="*****@*****.**" % i,
                    name="user%s" % i,
                    passwd_hash="1234%s" % i,
                    fullname="user_fullname%s" % i)
        db.session.add(user)
        task_run = TaskRun(app_id=app.id, task_id=task.id, user=user)
        db.session.add(task_run)
        if two_tasks:
            task_run2 = TaskRun(app_id=app.id, task_id=task2.id, user=user)
            db.session.add(task_run2)
    db.session.commit()
    return app
def timer_function():
    """Drain one item from the sourcerer Redis queue and, for payloads
    from the Chrome sourcerer, create one Task per project related to
    each of the payload's categories."""
    #Continue code here
    image_source = sentinel.slave.lpop("GEOTAGX-SOURCERER-QUEUE")
    if image_source:
        # payload format: <prefix>DELIMITER<base64-encoded JSON>
        split_image_source = image_source.split(DELIMITER)
        base64Data = split_image_source[-1]
        decodedJSONString = base64.b64decode(base64Data)
        parsedJSONObject = json.loads(decodedJSONString)
        if parsedJSONObject['source'] == 'geotagx-chrome-sourcerer':
            #Handle Chrome Sourcerer
            #TODO : Refactor into an OOP based implementation
            SOURCE_URI = parsedJSONObject['source_uri']
            IMAGE_URL = parsedJSONObject['image_url']
            for category in parsedJSONObject['categories']:
                category_objects = Category.query.filter(
                    Category.short_name == category)
                for category_object in category_objects:
                    related_projects = Project.query.filter(
                        Project.category == category_object)
                    for related_project in related_projects:
                        # Start building Task Object
                        _task_object = Task()
                        _task_object.project_id = related_project.id
                        # Build Info Object from whatever data we have
                        _info_object = {}
                        _info_object['image_url'] = IMAGE_URL
                        _info_object['source_uri'] = SOURCE_URI
                        # unique id: source name + 16 random hex chars
                        _info_object['id'] = parsedJSONObject['source'] + "_" + \
                            ''.join(random.choice('0123456789ABCDEF') for i in range(16))
                        _task_object.info = _info_object
                        db.session.add(_task_object)
                        db.session.commit()
                        print _now(), _task_object
    else:
        print _now(), "GEOTAGX-SOURCERER-QUEUE Empty....."
def get_random_ongoing_task(self, project_id, user_id, user_ip):
    """Return a random ongoing, non-broken task of the project that the
    requesting user (by id or by IP) has not already answered, wrapped
    in a Task; None when neither identifier is given.

    Bug fix: the anonymous-user branch contained garbled SQL
    (``ON ... = task.id AND WHERE`` and ``ais_broken``) that could not
    execute; it now mirrors the authenticated branch.
    """
    # If an authenticated user requests for a random task
    if user_id is not None:
        sql = text('''
                   SELECT * FROM "task"
                   LEFT JOIN "task_run" ON task_run.task_id = task.id
                   WHERE task.project_id = :project_id
                   AND is_broken=FALSE
                   AND task.state = 'ongoing'
                   AND (task_run.task_id IS NULL
                        OR (task_run.task_id IS NOT NULL
                            AND (task_run.user_id != :user_id
                                 OR task_run.user_id IS NULL)))
                   ORDER BY random() LIMIT 1;
                   ''')
        task_row_proxy = self.db.session.execute(
            sql, dict(project_id=project_id, user_id=user_id)).fetchone()
        return Task(task_row_proxy)
    # If an anonymous user requests for a random task
    elif user_ip is not None:
        sql = text('''
                   SELECT * FROM "task"
                   LEFT JOIN "task_run" ON task_run.task_id = task.id
                   WHERE task.project_id = :project_id
                   AND is_broken=FALSE
                   AND task.state = 'ongoing'
                   AND (task_run.task_id IS NULL
                        OR (task_run.task_id IS NOT NULL
                            AND (task_run.user_ip != :user_ip
                                 OR task_run.user_ip IS NULL)))
                   ORDER BY random() LIMIT 1;
                   ''')
        task_row_proxy = self.db.session.execute(
            sql, dict(project_id=project_id, user_ip=user_ip)).fetchone()
        return Task(task_row_proxy)
    # Normally, this is an unreachable return statement
    return None
def _test_get_breadth_first_task(self, user=None):
    """Shared driver for breadth-first scheduler tests (Project model).

    Builds a project with two tasks, gives the second task two runs,
    then checks the scheduler returns the least-answered task for
    anonymous and authenticated requesters, and that ``offset`` is
    honoured (None past the available window).
    """
    self.del_task_runs()
    # short_name only distinguishes the user/no-user fixture variants
    if user:
        short_name = 'xyzuser'
    else:
        short_name = 'xyznouser'
    category = db.session.query(Category).get(1)
    project = Project(short_name=short_name, name=short_name,
                      description=short_name, category=category)
    owner = db.session.query(User).get(1)
    project.owner = owner
    task = Task(project=project, state='0', info={})
    task2 = Task(project=project, state='0', info={})
    task.project = project
    task2.project = project
    db.session.add(project)
    db.session.add(task)
    db.session.add(task2)
    db.session.commit()
    taskid = task.id
    projectid = project.id
    # give task2 a bunch of runs
    for idx in range(2):
        self._add_task_run(project, task2)
    # now check we get task without task runs as anonymous user
    out = pybossa.sched.get_breadth_first_task(projectid)
    assert out.id == taskid, out
    # now check we get task without task runs as a user
    owner = db.session.query(User).get(1)
    out = pybossa.sched.get_breadth_first_task(projectid, owner.id)
    assert out.id == taskid, out
    # now check that offset works
    out1 = pybossa.sched.get_breadth_first_task(projectid)
    out2 = pybossa.sched.get_breadth_first_task(projectid, offset=1)
    assert out1.id != out2.id, out
    # asking for a bigger offset (max 10)
    out2 = pybossa.sched.get_breadth_first_task(projectid, offset=11)
    assert out2 is None, out
    # task now has 1 run vs task2's 2 runs, so it is still preferred
    self._add_task_run(project, task)
    out = pybossa.sched.get_breadth_first_task(projectid)
    assert out.id == taskid, out
    # now add 2 more taskruns. We now have 3 and 2 task runs per task
    self._add_task_run(project, task)
    self._add_task_run(project, task)
    out = pybossa.sched.get_breadth_first_task(projectid)
    assert out.id == task2.id, out
def test_get_generator(self, mock_client):
    """Test that the correct Annotation generator is returned."""
    base = Base(None)
    task = Task(project_id=1)
    generator = base._get_generator(task.id)
    expected = [
        {
            "id": flask_app.config.get('GITHUB_REPO'),
            "type": "Software",
            "name": "LibCrowds",
            "homepage": flask_app.config.get('SPA_SERVER_NAME')
        },
        {
            "id": url_for('api.api_task', oid=task.id),
            "type": "Software"
        }
    ]
    assert_equal(generator, expected)