diff --git a/pybossa/model/counter.py b/pybossa/model/counter.py
deleted file mode 100644
index 36f0308b9..000000000
--- a/pybossa/model/counter.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf8 -*-
-# This file is part of PYBOSSA.
-#
-# Copyright (C) 2017 Scifabric LTD.
-#
-# PYBOSSA is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# PYBOSSA is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with PYBOSSA. If not, see <https://www.gnu.org/licenses/>.
-
-from sqlalchemy import Integer
-from sqlalchemy.schema import Column, ForeignKey
-from sqlalchemy.dialects.postgresql import TIMESTAMP
-from pybossa.core import db
-from pybossa.model import DomainObject, make_timestamp
-
-
-class Counter(db.Model, DomainObject):
- '''A Counter lists the number of task runs for a given Task.'''
-
- __tablename__ = 'counter'
-
- #: Counter.ID
- id = Column(Integer, primary_key=True)
- #: UTC timestamp when the counter was created.
- created = Column(TIMESTAMP, default=make_timestamp)
- #: Project.ID that this counter is associated with.
- project_id = Column(Integer, ForeignKey('project.id',
- ondelete='CASCADE'),
- nullable=False)
- #: Task.ID that this counter is associated with.
- task_id = Column(Integer, ForeignKey('task.id',
- ondelete='CASCADE'),
- nullable=False)
- #: Number of task_runs for this task.
- n_task_runs = Column(Integer, default=0, nullable=False)
diff --git a/pybossa/sched.py b/pybossa/sched.py
index 11efc8f4f..750c9ffcc 100644
--- a/pybossa/sched.py
+++ b/pybossa/sched.py
@@ -64,12 +64,9 @@ def new_task(project_id, sched, user_id=None, user_ip=None,
"""Get a new task by calling the appropriate scheduler function."""
sched_map = {
'default': get_locked_task,
- 'breadth_first': get_breadth_first_task,
- 'depth_first': get_depth_first_task,
Schedulers.locked: get_locked_task,
'incremental': get_incremental_task,
Schedulers.user_pref: get_user_pref_task,
- 'depth_first_all': get_depth_first_all_task,
Schedulers.task_queue: get_user_pref_task
}
scheduler = sched_map.get(sched, sched_map['default'])
@@ -138,59 +135,6 @@ def after_save(task_run, conn):
release_reserve_task_lock_by_id(task_run.project_id, task_run.task_id, uid, TIMEOUT)
-def get_breadth_first_task(project_id, user_id=None, user_ip=None,
- external_uid=None, offset=0, limit=1, orderby='id',
- desc=False, **kwargs):
- """Get a new task which have the least number of task runs."""
- project_query = session.query(Task.id).filter(Task.project_id==project_id,
- Task.state!='completed',
- Task.state!='enrich')
- if user_id and not user_ip and not external_uid:
- subquery = session.query(TaskRun.task_id).filter_by(project_id=project_id,
- user_id=user_id)
- else:
- if not user_ip: # pragma: no cover
- user_ip = '127.0.0.1'
- if user_ip and not external_uid:
- subquery = session.query(TaskRun.task_id).filter_by(project_id=project_id,
- user_ip=user_ip)
- else:
- subquery = session.query(TaskRun.task_id).filter_by(project_id=project_id,
- external_uid=external_uid)
-
- tmp = project_query.except_(subquery)
- query = session.query(Task, func.sum(Counter.n_task_runs).label('n_task_runs'))\
- .filter(Task.id==Counter.task_id)\
- .filter(Counter.task_id.in_(tmp))\
- .filter(or_(Task.expiration == None, Task.expiration > datetime.utcnow()))\
- .group_by(Task.id)\
- .order_by(text('n_task_runs ASC'))\
-
- query = _set_orderby_desc(query, orderby, desc)
- data = query.limit(limit).offset(offset).all()
- return _handle_tuples(data)
-
-
-def get_depth_first_task(project_id, user_id=None, user_ip=None,
- external_uid=None, offset=0, limit=1,
- orderby='priority_0', desc=True, **kwargs):
- """Get a new task for a given project."""
- tasks = get_candidate_task_ids(project_id, user_id,
- user_ip, external_uid, limit, offset,
- orderby=orderby, desc=desc)
- return tasks
-
-
-def get_depth_first_all_task(project_id, user_id=None, user_ip=None,
- external_uid=None, offset=0, limit=1,
- orderby='priority_0', desc=True, **kwargs):
- """Get a new task for a given project."""
- tasks = get_candidate_task_ids(project_id, user_id,
- user_ip, external_uid, limit, offset,
- orderby=orderby, desc=desc, completed=False)
- return tasks
-
-
def get_incremental_task(project_id, user_id=None, user_ip=None,
external_uid=None, offset=0, limit=1, orderby='id',
desc=False, **kwargs):
@@ -850,11 +794,9 @@ def get_project_scheduler(project_id, conn):
def sched_variants():
- return [('default', 'Default'), ('breadth_first', 'Breadth First'),
- ('depth_first', 'Depth First'),
+ return [('default', 'Default'),
(Schedulers.locked, 'Locked'),
(Schedulers.user_pref, 'User Preference Scheduler'),
- ('depth_first_all', 'Depth First All'),
(Schedulers.task_queue, 'Task Queues')
]
diff --git a/test/test_api/test_task_api.py b/test/test_api/test_task_api.py
index 03ac07a76..2125ba807 100644
--- a/test/test_api/test_task_api.py
+++ b/test/test_api/test_task_api.py
@@ -21,7 +21,6 @@
from nose.tools import assert_equal
from pybossa.api.task import TaskAPI
-from pybossa.model.counter import Counter
from pybossa.repositories import ProjectRepository
from pybossa.repositories import ResultRepository
from pybossa.repositories import TaskRepository
diff --git a/test/test_auditlog.py b/test/test_auditlog.py
index 9970ee123..631875977 100644
--- a/test/test_auditlog.py
+++ b/test/test_auditlog.py
@@ -198,11 +198,10 @@ def test_project_update_scheduler(self):
owner_id = project.owner.id
owner_name = project.owner.name
- data = {'info': {'sched': 'depth_first', 'data_classification': dict(input_data="L4 - public", output_data="L4 - public")}}
+ data = {'info': {'sched': 'task_queue_scheduler', 'data_classification': dict(input_data="L4 - public", output_data="L4 - public")}}
url = '/api/project/%s?api_key=%s' % (project.id, project.owner.api_key)
self.app.put(url, data=json.dumps(data))
logs = auditlog_repo.filter_by(project_id=project.id)
-
assert len(logs) == 1, logs
for log in logs:
assert log.user_id == owner_id, log.user_id
@@ -221,7 +220,7 @@ def test_project_update_two_info_objects(self):
owner_id = project.owner.id
owner_name = project.owner.name
- data = {'info': {'sched': 'depth_first', 'task_presenter': 'new', 'data_classification': dict(input_data="L4 - public", output_data="L4 - public")}}
+ data = {'info': {'sched': 'task_queue_scheduler', 'task_presenter': 'new', 'data_classification': dict(input_data="L4 - public", output_data="L4 - public")}}
attributes = list(data['info'].keys())
url = '/api/project/%s?api_key=%s' % (project.id, project.owner.api_key)
self.app.put(url, data=json.dumps(data))
@@ -564,7 +563,7 @@ def test_project_task_scheduler(self):
attribute = 'sched'
- new_string = 'depth_first'
+ new_string = 'locked_scheduler'
old_value = 'default'
diff --git a/test/test_sched.py b/test/test_sched.py
index 622716204..0fd0635e6 100644
--- a/test/test_sched.py
+++ b/test/test_sched.py
@@ -119,46 +119,6 @@ def test_anonymous_03_respects_limit_tasks(self):
assert data.get('id'), data
- @with_context
- def test_newtask_default_orderby(self):
- """Test SCHED depth first works with orderby."""
- project = ProjectFactory.create(info=dict(sched="depth_first"))
- task1 = TaskFactory.create(project=project, fav_user_ids=None)
- task2 = TaskFactory.create(project=project, fav_user_ids=[1,2,3])
- api_key = project.owner.api_key
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'id', False, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task1.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'id', True, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task2.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'created', False, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task1.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'created', True, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task2.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'fav_user_ids', False, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task1.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'fav_user_ids', True, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task2.id, data
- assert data['fav_user_ids'] == task2.fav_user_ids, data
-
-
@with_context
def test_user_01_newtask(self):
""" Test SCHED newtask returns a Task for John Doe User"""
diff --git a/test/test_sched_depth_first_all.py b/test/test_sched_depth_first_all.py
deleted file mode 100644
index 2d59ac7c4..000000000
--- a/test/test_sched_depth_first_all.py
+++ /dev/null
@@ -1,752 +0,0 @@
-# -*- coding: utf8 -*-
-# This file is part of PYBOSSA.
-#
-# Copyright (C) 2015 Scifabric LTD.
-#
-# PYBOSSA is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# PYBOSSA is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with PYBOSSA. If not, see <https://www.gnu.org/licenses/>.
-
-import json
-
-from pybossa.model.task import Task
-from pybossa.model.task_run import TaskRun
-from pybossa.sched import get_depth_first_all_task
-from test import db, with_context
-from test.factories import AnonymousTaskRunFactory
-from test.factories import TaskFactory, ProjectFactory, TaskRunFactory, \
- UserFactory
-from test.helper import sched
-
-
-class TestSched(sched.Helper):
-
- endpoints = ['project', 'task', 'taskrun']
-
- def get_headers_jwt(self, project):
- """Return headesr JWT token."""
- # Get JWT token
- url = 'api/auth/project/%s/token' % project.short_name
-
- res = self.app.get(url, headers={'Authorization': project.secret_key})
-
- authorization_token = 'Bearer %s' % res.data
-
- return {'Authorization': authorization_token}
-
- # Tests
- @with_context
- def test_anonymous_01_newtask(self):
- """ Test SCHED newtask does not returns a Task for the Anonymous User"""
- project = ProjectFactory.create(info=dict(sched='depth_first_all'))
- TaskFactory.create_batch(2, project=project, info='hola')
-
- res = self.app.get('api/project/%s/newtask' %project.id)
- data = json.loads(res.data)
- task_id = data['id']
- assert 'error' in data['info']
-
- @with_context
- def test_external_uid_02_gets_different_tasks(self):
- """ Test SCHED newtask does not return tasks
- for a external User ID."""
- assigned_tasks = []
- # Get a Task until scheduler returns None
- project = ProjectFactory.create(info=dict(sched='depth_first_all'))
-
- tasks = TaskFactory.create_batch(3, project=project, info={})
-
- headers = self.get_headers_jwt(project)
-
- url = 'api/project/%s/newtask?external_uid=%s' % (project.id, '1xa')
-
- res = self.app.get(url, headers=headers)
- data = json.loads(res.data)
- assert 'error' in data['info']
-
- @with_context
- def test_anonymous_03_respects_limit_tasks(self):
- """ Test SCHED newtask respects the limit of 10 TaskRuns per Task"""
- assigned_tasks = []
- project = ProjectFactory.create(owner=UserFactory.create(id=500),
- info=dict(sched='depth_first_all'))
-
- user = UserFactory.create()
-
- task = TaskFactory.create(project=project, n_answers=10)
-
- tasks = get_depth_first_all_task(project.id, user.id)
- assert len(tasks) == 1, len(tasks)
- assert tasks[0].id == task.id, tasks
- assert tasks[0].state == 'ongoing', tasks
-
- for i in range(10):
- tr = TaskRun(project_id=project.id,
- task_id=task.id,
- user_ip='127.0.0.%s' % i)
- db.session.add(tr)
- db.session.commit()
-
- tasks = get_depth_first_all_task(project.id, user.id)
- assert len(tasks) == 1, len(tasks)
- assert tasks[0].id == task.id, tasks
- assert tasks[0].state == 'completed', tasks
-
- for i in range(10):
- tasks = get_depth_first_all_task(project.id,
- user_id=None,
- user_ip='127.0.0.%s' % i)
- assert len(tasks) == 0, tasks
-
- tr = TaskRun(project_id=project.id,
- task_id=task.id,
- user_id=user.id)
- db.session.add(tr)
- db.session.commit()
- tasks = get_depth_first_all_task(project.id, user.id)
- assert len(tasks) == 0, tasks
-
-
- @with_context
- def test_anonymous_03_respects_limit_tasks_limits(self):
- """ Test SCHED newtask limit respects the limit of 30 TaskRuns per Task using limits"""
- assigned_tasks = []
- user = UserFactory.create()
- project = ProjectFactory.create(info=dict(sched='depth_first_all'))
-
- orig_tasks = TaskFactory.create_batch(2, project=project, n_answers=5)
-
- tasks = get_depth_first_all_task(project.id, user.id, limit=2)
- assert len(tasks) == 2, len(tasks)
- assert tasks[0].id == orig_tasks[0].id, tasks
- assert tasks[1].id == orig_tasks[1].id, tasks
-
- for i in range(5):
- tr = TaskRun(project_id=project.id,
- task_id=tasks[0].id,
- user_ip='127.0.0.%s' % i)
- db.session.add(tr)
- db.session.commit()
-
- # Task should be marked as completed, but as user has no
- # participated it should get the completed one as well.
- tasks = get_depth_first_all_task(project.id, user.id, limit=2,
- orderby='id', desc=False)
- assert len(tasks) == 2, len(tasks)
- assert tasks[0].id == orig_tasks[0].id, tasks[0]
- assert tasks[0].state == 'completed', tasks[0].state
- assert len(tasks[0].task_runs) == 5
- assert tasks[1].id == orig_tasks[1].id
- assert tasks[1].state == 'ongoing', tasks[1].state
- assert len(tasks[1].task_runs) == 0
-
- # User contributes, so only one task should be returned
- tr = TaskRun(project_id=project.id,
- task_id=tasks[0].id,
- user_id=user.id)
- db.session.add(tr)
- db.session.commit()
-
- tasks = get_depth_first_all_task(project.id, user.id, limit=2,
- orderby='id', desc=False)
- assert len(tasks) == 1, len(tasks)
- assert tasks[0].id == orig_tasks[1].id, tasks[0]
- assert tasks[0].state == 'ongoing', tasks[0].state
- assert len(tasks[0].task_runs) == 0
-
-
- @with_context
- def test_external_uid_03_respects_limit_tasks(self):
- """ Test SCHED newtask external uid respects the limit of 30 TaskRuns per Task for
- external user id"""
- assigned_tasks = []
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
- user = UserFactory.create()
-
- task = TaskFactory.create(project=project, n_answers=10)
-
- uid = '1xa'
- tasks = get_depth_first_all_task(project.id, external_uid=uid)
- assert len(tasks) == 1, len(tasks)
- assert tasks[0].id == task.id, tasks
- assert tasks[0].state == 'ongoing', tasks
-
- # Add taskruns
- for i in range(10):
- tr = TaskRun(project_id=project.id,
- task_id=task.id,
- user_ip='127.0.0.%s' % i)
- db.session.add(tr)
- db.session.commit()
-
- tasks = get_depth_first_all_task(project.id, external_uid=uid)
- assert len(tasks) == 1, len(tasks)
- assert tasks[0].id == task.id, tasks
- assert tasks[0].state == 'completed', tasks
- assert len(tasks[0].task_runs) == 10, tasks
-
- @with_context
- def test_external_uid_03_respects_limit_tasks_limits(self):
- """ Test SCHED newtask external uid limits respects the limit of 30 TaskRuns per list of Tasks for
- external user id"""
- # Get Task until scheduler returns None
- project = ProjectFactory.create(info=dict(sched='depth_first_all'))
-
- orig_tasks = TaskFactory.create_batch(2, project=project, n_answers=5)
- headers = self.get_headers_jwt(project)
- uid = '1xa'
- url = 'api/project/%s/newtask?external_uid=%s&limit=2' % (project.id,
- uid)
- tasks = get_depth_first_all_task(project.id, external_uid=uid, limit=2)
- assert len(tasks) == 2, len(tasks)
- assert tasks[0].id == orig_tasks[0].id, tasks
- assert tasks[0].state == 'ongoing', tasks
- assert tasks[1].id == orig_tasks[1].id, tasks
- assert tasks[1].state == 'ongoing', tasks
-
- @with_context
- def test_newtask_default_orderby(self):
- """Test SCHED depth first works with orderby."""
- project = ProjectFactory.create(info=dict(sched="depth_first_all"))
- task1 = TaskFactory.create(project=project, fav_user_ids=None)
- task2 = TaskFactory.create(project=project, fav_user_ids=[1,2,3])
- api_key = project.owner.api_key
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'id', False, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task1.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'id', True, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task2.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'created', False, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task1.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'created', True, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task2.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'fav_user_ids', False, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task1.id, data
-
- url = "/api/project/%s/newtask?orderby=%s&desc=%s&api_key=%s" % (project.id, 'fav_user_ids', True, api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'] == task2.id, data
- assert data['fav_user_ids'] == task2.fav_user_ids, data
-
-
- @with_context
- def test_user_01_newtask(self):
- """ Test SCHED newtask returns a Task for John Doe User"""
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- TaskFactory.create_batch(2, project=project, n_answers=2)
-
- # Register
- self.register()
- self.signin()
- url = 'api/project/%s/newtask' % project.id
- self.set_proj_passwd_cookie(project, username='johndoe')
- res = self.app.get(url)
- data = json.loads(res.data)
- task_id = data['id']
- assert data['id'], data
-
- taskrun = dict(project_id=data['project_id'], task_id=data['id'], info="hola")
- res = self.app.post('api/taskrun', data=json.dumps(taskrun))
-
- res = self.app.get(url)
- data = json.loads(res.data)
- assert data['id'], data
- assert data['id'] != task_id, data
-
- self.signout()
-
- @with_context
- def test_user_01_newtask_limits(self):
- """ Test SCHED newtask returns a Task for John Doe User with limits"""
- self.register()
- self.signin()
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- tasks = TaskFactory.create_batch(10, project=project, info=dict(foo=1))
-
- # Register
- url = 'api/project/%s/newtask?limit=2' % project.id
- res = self.app.get(url)
- data = json.loads(res.data)
- assert len(data) == 2, data
- for t in data:
- assert t['info']['foo'] == 1, t
- self.signout()
-
- @with_context
- def test_user_02_gets_different_tasks(self):
- """ Test SCHED newtask returns N different Tasks for John Doe User"""
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- TaskFactory.create_batch(10, project=project)
-
- # Register
- self.register()
- self.signin()
-
- assigned_tasks = []
- # Get Task until scheduler returns None
- url = 'api/project/%s/newtask' % project.id
- self.set_proj_passwd_cookie(project, username='johndoe')
- res = self.app.get(url)
- data = json.loads(res.data)
- while data.get('id') is not None:
- # Check that we received a Task
- assert data.get('id'), data
-
- # Save the assigned task
- assigned_tasks.append(data)
-
- # Submit an Answer for the assigned task
- tr = dict(project_id=data['project_id'], task_id=data['id'],
- info={'answer': 'No'})
- tr = json.dumps(tr)
-
- self.app.post('/api/taskrun', data=tr)
- res = self.app.get(url)
- data = json.loads(res.data)
-
- # Check if we received the same number of tasks that the available ones
- tasks = db.session.query(Task).filter_by(project_id=1).all()
- assert len(assigned_tasks) == len(tasks), assigned_tasks
- # Check if all the assigned Task.id are equal to the available ones
- tasks = db.session.query(Task).filter_by(project_id=1).all()
- err_msg = "Assigned Task not found in DB Tasks"
- for at in assigned_tasks:
- assert self.is_task(at['id'], tasks), err_msg
- # Check that there are no duplicated tasks
- err_msg = "One Assigned Task is duplicated"
- for at in assigned_tasks:
- assert self.is_unique(at['id'], assigned_tasks), err_msg
-
- @with_context
- def test_user_02_gets_different_tasks_limit(self):
- """ Test SCHED newtask returns N different list of Tasks for John Doe User"""
- # Register
- self.register()
- self.signin()
-
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- TaskFactory.create_batch(10, project=project)
-
- assigned_tasks = []
- # Get Task until scheduler returns None
- url = 'api/project/%s/newtask?limit=5' % project.id
- res = self.app.get(url)
- data = json.loads(res.data)
- while len(data) > 0:
- # Check that we received a Task
- for t in data:
- assert t.get('id'), t
-
- # Save the assigned task
- assigned_tasks.append(t)
-
- # Submit an Answer for the assigned task
- tr = dict(project_id=t['project_id'], task_id=t['id'],
- info={'answer': 'No'})
- tr = json.dumps(tr)
-
- self.app.post('/api/taskrun', data=tr)
- res = self.app.get(url)
- data = json.loads(res.data)
-
- # Check if we received the same number of tasks that the available ones
- tasks = db.session.query(Task).filter_by(project_id=1).all()
- assert len(assigned_tasks) == len(tasks), assigned_tasks
- # Check if all the assigned Task.id are equal to the available ones
- tasks = db.session.query(Task).filter_by(project_id=1).all()
- err_msg = "Assigned Task not found in DB Tasks"
- for at in assigned_tasks:
- assert self.is_task(at['id'], tasks), err_msg
- # Check that there are no duplicated tasks
- err_msg = "One Assigned Task is duplicated"
- for at in assigned_tasks:
- assert self.is_unique(at['id'], assigned_tasks), err_msg
-
-
- @with_context
- def test_user_03_respects_limit_tasks(self):
- """ Test SCHED newtask respects the limit of 30 TaskRuns per Task"""
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
- orig_tasks = TaskFactory.create_batch(1, project=project, n_answers=10)
- user = UserFactory.create()
-
- tasks = get_depth_first_all_task(project.id, user.id)
- assert len(tasks) == 1, len(tasks)
- assert tasks[0].id == orig_tasks[0].id, tasks
- assert tasks[0].state == 'ongoing', tasks
-
- for i in range(10):
- tr = TaskRun(project_id=project.id,
- task_id=orig_tasks[0].id,
- user_ip='127.0.0.%s' % i)
- db.session.add(tr)
- db.session.commit()
-
- tasks = get_depth_first_all_task(project.id, user.id)
- assert len(tasks) == 1, len(tasks)
- assert tasks[0].id == orig_tasks[0].id, tasks
- assert tasks[0].state == 'completed', tasks
- assert len(tasks[0].task_runs) == 10, tasks
-
- tr = TaskRun(project_id=project.id,
- task_id=orig_tasks[0].id,
- user_id=user.id)
- db.session.add(tr)
- db.session.commit()
-
- tasks = get_depth_first_all_task(project.id, user.id)
-
- assert len(tasks) == 0, tasks
-
-
- @with_context
- def test_user_03_respects_limit_tasks_limit(self):
- """ Test SCHED limit arg newtask respects the limit of 30 TaskRuns per list of Tasks"""
- # Del previous TaskRuns
- assigned_tasks = []
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- user = UserFactory.create()
-
- orig_tasks = TaskFactory.create_batch(2, project=project, n_answers=10)
-
- tasks = get_depth_first_all_task(project.id, user.id,
- limit=2, orderby='id',
- desc=False)
- assert len(tasks) == 2, len(tasks)
- assert tasks[0].id == orig_tasks[0].id, tasks
- assert tasks[0].state == 'ongoing', tasks
- assert tasks[1].id == orig_tasks[1].id, tasks
- assert tasks[1].state == 'ongoing', tasks
-
- for i in range(10):
- tr = TaskRun(project_id=project.id,
- task_id=tasks[0].id,
- user_ip='127.0.0.%s' % i)
- db.session.add(tr)
- db.session.commit()
-
- tasks = get_depth_first_all_task(project.id, user.id,
- limit=2, orderby='id',
- desc=False)
- assert len(tasks) == 2, len(tasks)
- assert tasks[0].id == orig_tasks[0].id, tasks
- assert tasks[0].state == 'completed', tasks
- assert len(tasks[0].task_runs) == 10, tasks
- assert tasks[1].id == orig_tasks[1].id, tasks
- assert tasks[1].state == 'ongoing', tasks
- assert len(tasks[1].task_runs) == 0, tasks
-
- tr = TaskRun(project_id=project.id,
- task_id=tasks[0].id,
- user_id=user.id)
- db.session.add(tr)
- db.session.commit()
-
- tasks = get_depth_first_all_task(project.id, user.id,
- limit=2, orderby='id',
- desc=False)
-
- assert len(tasks) == 1, tasks
- assert tasks[0].id == orig_tasks[1].id
- assert tasks[0].state == 'ongoing'
-
-
-
- @with_context
- def test_task_preloading(self):
- """Test TASK Pre-loading works"""
- # Del previous TaskRuns
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- TaskFactory.create_batch(10, project=project)
-
- # Register
- self.register()
- self.signin()
-
- assigned_tasks = []
- # Get Task until scheduler returns None
- self.set_proj_passwd_cookie(project, username='johndoe')
- url = 'api/project/%s/newtask' % project.id
- res = self.app.get(url)
- task1 = json.loads(res.data)
- # Check that we received a Task
- assert task1.get('id'), task1
- # Pre-load the next task for the user
- res = self.app.get(url + '?offset=1')
- task2 = json.loads(res.data)
- # Check that we received a Task
- assert task2.get('id'), task2
- # Check that both tasks are different
- assert task1.get('id') != task2.get('id'), "Tasks should be different"
- ## Save the assigned task
- assigned_tasks.append(task1)
- assigned_tasks.append(task2)
-
- # Submit an Answer for the assigned and pre-loaded task
- for t in assigned_tasks:
- tr = dict(project_id=t['project_id'], task_id=t['id'], info={'answer': 'No'})
- tr = json.dumps(tr)
-
- self.app.post('/api/taskrun', data=tr)
- # Get two tasks again
- res = self.app.get(url)
- task3 = json.loads(res.data)
- # Check that we received a Task
- assert task3.get('id'), task1
- # Pre-load the next task for the user
- res = self.app.get(url + '?offset=1')
- task4 = json.loads(res.data)
- # Check that we received a Task
- assert task4.get('id'), task2
- # Check that both tasks are different
- assert task3.get('id') != task4.get('id'), "Tasks should be different"
- assert task1.get('id') != task3.get('id'), "Tasks should be different"
- assert task2.get('id') != task4.get('id'), "Tasks should be different"
- # Check that a big offset returns None
- res = self.app.get(url + '?offset=11')
- assert json.loads(res.data) == {}, res.data
-
- @with_context
- def test_task_preloading_limit(self):
- """Test TASK Pre-loading with limit works"""
- # Register
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- TaskFactory.create_batch(10, project=project)
- self.register()
- self.signin()
-
- assigned_tasks = []
- url = 'api/project/%s/newtask?limit=2' % project.id
- self.set_proj_passwd_cookie(project, username='johndoe')
- res = self.app.get(url)
- tasks1 = json.loads(res.data)
- # Check that we received a Task
- for t in tasks1:
- assert t.get('id'), t
- # Pre-load the next tasks for the user
- res = self.app.get(url + '&offset=2')
- tasks2 = json.loads(res.data)
- # Check that we received a Task
- for t in tasks2:
- assert t.get('id'), t
- # Check that both tasks are different
- tasks1_ids = set([t['id'] for t in tasks1])
- tasks2_ids = set([t['id'] for t in tasks2])
- assert len(tasks1_ids.union(tasks2_ids)) == 4, "Tasks should be different"
- ## Save the assigned task
- for t in tasks1:
- assigned_tasks.append(t)
- for t in tasks2:
- assigned_tasks.append(t)
-
- # Submit an Answer for the assigned and pre-loaded task
- for t in assigned_tasks:
- tr = dict(project_id=t['project_id'], task_id=t['id'], info={'answer': 'No'})
- tr = json.dumps(tr)
-
- self.app.post('/api/taskrun', data=tr)
- # Get two tasks again
- res = self.app.get(url)
- tasks3 = json.loads(res.data)
- # Check that we received a Task
- for t in tasks3:
- assert t.get('id'), t
- # Pre-load the next task for the user
- res = self.app.get(url + '&offset=2')
- tasks4 = json.loads(res.data)
- # Check that we received a Task
- for t in tasks4:
- assert t.get('id'), t
- # Check that both tasks are different
- tasks3_ids = set([t['id'] for t in tasks3])
- tasks4_ids = set([t['id'] for t in tasks4])
- assert len(tasks3_ids.union(tasks4_ids)) == 4, "Tasks should be different"
-
- # Check that a big offset returns None
- res = self.app.get(url + '&offset=11')
- assert json.loads(res.data) == {}, res.data
-
- @with_context
- def test_task_priority(self):
- """Test SCHED respects priority_0 field"""
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- TaskFactory.create_batch(10, project=project)
-
- # Register
- self.register()
- self.signin()
-
- # By default, tasks without priority should be ordered by task.id (FIFO)
- tasks = db.session.query(Task).filter_by(project_id=1).order_by('id').all()
- url = 'api/project/%s/newtask' % project.id
- self.set_proj_passwd_cookie(project, username='johndoe')
- res = self.app.get(url)
- task1 = json.loads(res.data)
- # Check that we received a Task
- err_msg = "Task.id should be the same"
- assert task1.get('id') == tasks[0].id, err_msg
-
- # Now let's change the priority to a random task
- import random
- t = random.choice(tasks)
- # Increase priority to maximum
- t.priority_0 = 1
- db.session.add(t)
- db.session.commit()
- # Request again a new task
- res = self.app.get(url + '?orderby=priority_0&desc=true')
- task1 = json.loads(res.data)
- # Check that we received a Task
- err_msg = "Task.id should be the same"
- assert task1.get('id') == t.id, err_msg
- err_msg = "Task.priority_0 should be the 1"
- assert task1.get('priority_0') == 1, err_msg
-
- @with_context
- def test_task_priority_limit(self):
- """Test SCHED respects priority_0 field with limit"""
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=UserFactory.create(id=500))
-
- TaskFactory.create_batch(10, project=project)
-
- # Register
- self.register()
- self.signin()
-
- # By default, tasks without priority should be ordered by task.id (FIFO)
- tasks = db.session.query(Task).filter_by(project_id=project.id).order_by('id').all()
- url = 'api/project/%s/newtask?limit=2' % project.id
- self.set_proj_passwd_cookie(project, username='johndoe')
- res = self.app.get(url)
- tasks1 = json.loads(res.data)
- # Check that we received a Task
- err_msg = "Task.id should be the same"
- assert tasks1[0].get('id') == tasks[0].id, err_msg
-
- # Now let's change the priority to a random task
- import random
- t = random.choice(tasks)
- # Increase priority to maximum
- t.priority_0 = 1
- db.session.add(t)
- db.session.commit()
- # Request again a new task
- res = self.app.get(url + '&orderby=priority_0&desc=true')
- tasks1 = json.loads(res.data)
- # Check that we received a Task
- err_msg = "Task.id should be the same"
- assert tasks1[0].get('id') == t.id, (err_msg, tasks1[0])
- err_msg = "Task.priority_0 should be the 1"
- assert tasks1[0].get('priority_0') == 1, err_msg
-
- def _add_task_run(self, app, task, user=None):
- tr = AnonymousTaskRunFactory.create(project=app, task=task)
-
- @with_context
- def test_no_more_tasks(self):
- """Test that a users gets always tasks"""
- owner = UserFactory.create()
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=owner,
- short_name='egil',
- name='egil',
- description='egil')
-
-
- project_id = project.id
-
- all_tasks = TaskFactory.create_batch(20, project=project, n_answers=10)
-
- for t in all_tasks[0:10]:
- TaskRunFactory.create_batch(10, task=t, project=project)
-
- tasks = db.session.query(Task).filter_by(project_id=project.id, state='ongoing').all()
- assert tasks[0].n_answers == 10
-
- url = 'api/project/%s/newtask?api_key=%s' % (project.id, owner.api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
-
- err_msg = "User should get a task"
- assert 'project_id' in data.keys(), err_msg
- assert data['project_id'] == project_id, err_msg
- assert data['id'] == all_tasks[0].id, err_msg
- assert data['state'] == 'completed', err_msg
-
- @with_context
- def test_no_more_tasks_limit(self):
- """Test that a users gets always tasks with limit"""
- owner = UserFactory.create()
- project = ProjectFactory.create(info=dict(sched='depth_first_all'),
- owner=owner,
- short_name='egil',
- name='egil',
- description='egil')
-
-
- project_id = project.id
-
- all_tasks = TaskFactory.create_batch(20, project=project, n_answers=10)
-
- for t in all_tasks[0:10]:
- TaskRunFactory.create_batch(10, task=t, project=project)
-
- tasks = db.session.query(Task).filter_by(project_id=project.id, state='ongoing').all()
- assert tasks[0].n_answers == 10
-
- url = 'api/project/%s/newtask?limit=2&orderby=id&api_key=%s' % (project_id, owner.api_key)
- res = self.app.get(url)
- data = json.loads(res.data)
-
- err_msg = "User should get a task"
- i = 0
- for t in data:
- print(t['id'])
- assert 'project_id' in t.keys(), err_msg
- assert t['project_id'] == project_id, err_msg
- assert t['id'] == all_tasks[i].id, (err_msg, t, all_tasks[i].id)
- assert t['state'] == 'completed', err_msg
- i += 1
diff --git a/test/test_web.py b/test/test_web.py
index 32c484a37..85b4aade3 100644
--- a/test/test_web.py
+++ b/test/test_web.py
@@ -7827,7 +7827,7 @@ def test_75_task_settings_scheduler(self, mock):
for i in range(0, 1):
if i == 0:
self.signin()
- sched = 'depth_first'
+ sched = 'locked_scheduler'
else:
sched = 'default'
self.signin()
@@ -7868,6 +7868,10 @@ def test_75_available_task_schedulers(self, mock):
self.new_project()
url = "/project/sampleapp/tasks/scheduler"
form_id = 'task_scheduler'
+ supported_schedulers = [
+ 'default', 'locked_scheduler', 'user_pref_scheduler', 'task_queue_scheduler',
+ 'userPrefLang', 'userPrefLoc'
+ ]
from pybossa.core import setup_schedulers
try:
@@ -7901,7 +7905,9 @@ def test_75_available_task_schedulers(self, mock):
options = dom.find_all('option')
scheds = [o.attrs['value'] for o in options]
assert 'user_pref_scheduler' in scheds
- assert 'breadth_first' in scheds
+
+ all_enabled_schedulers = all([sched in supported_schedulers for sched in scheds])
+ assert all_enabled_schedulers, scheds
@with_context
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
@@ -7918,7 +7924,7 @@ def test_75_task_settings_scheduler_json(self, mock):
if i == 0:
# As owner
new_url = url + '?api_key=%s' % owner.api_key
- sched = 'depth_first'
+ sched = 'locked_scheduler'
else:
new_url = url + '?api_key=%s' % admin.api_key
sched = 'default'