Example #1
    def test_task(self):
        task.add(subject="PHY",
                 cls="5C23",
                 title="test1",
                 due_date=datetime.date(2016, 8, 28),
                 usr=self.weijie)

        # Check task filtering by subject combination
        tasks = task.get_list(self.weijie)
        self.assertEqual(tasks[0].title, "test1")
        task_key = tasks[0].key.urlsafe()

        tasks = task.get_list(self.jerrayl)
        self.assertEqual(tasks, [])

        # Check edit & get task
        task.edit(subject="PHY",
                  cls="5C23",
                  title="test2",
                  due_date=datetime.date(2016, 8, 28),
                  url_id=task_key,
                  usr=self.weijie)

        task1 = task.get(task_key, self.weijie)
        assert task1.title == "test2"

        # Check delete task
        task.remove(task_key, self.weijie)
        assert not task.get_list(self.weijie)
Example #2
def pipeline(config: settings.Config):
    task_handler = task.get(config)
    task_handler.load_model()
    task_handler.test_doc_vec()
    task_handler.test_user_vec()
    task_handler.test_user_doc_score()
    task_handler.test_correct()
    return 0
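
For reference, a minimal usage sketch of how this pipeline() entry point might be driven, assuming a settings.Config built from a plain dict as in Example #17 below; the config keys are copied from that example and are illustrative, not a complete or authoritative configuration.

import settings

# Hypothetical driver: build a Config and run the pipeline defined above.
config = settings.Config({
    'task': 'TestPipelineBody',      # task name as used in Example #17
    'name': '',
    'batch_size': 256,
    'pipeline_input': '../../pipeline-2018-12-01'
})
exit_code = pipeline(config)  # pipeline() as defined in Example #2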
Example #3
File: views.py Project: tmars/DS_CW
def car_info(car):
    return {
        'id': car.id,
        'name': str(car),
        'tarifs': [car.tarif1, car.tarif2, car.tarif3, car.tarif4],
        'image': task.get("B_URL") + car.image.url[1:],
        'class': car.class_name,
        'body': car.body,
    }
Example #4
def score(config: settings.Config):
    task_handler = task.get(config)
    with utils.open(next(task_handler), 'w') as file:
        model = next(task_handler)
        for batch_info, batch_data in task_handler:
            batch_pred = model.predict_on_batch(batch_data)
            for (session, label, score), pred in zip(batch_info, batch_pred):
                file.write('{}\t{}\t{}\t{}\n'.format(session, label, score,
                                                     pred[0]))

    return 0
Example #5
def users(config: settings.Config):
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                        level=logging.INFO,
                        handlers=[
                            logging.FileHandler(config.log_output),
                            logging.StreamHandler()
                        ])

    task_handler = task.get(config)
    task_handler.save_result()
    return 0
Example #6
File: api.py Project: jerrayl/Eduto
    def get_task(self, request):
        t = task.get(url_id=request.url_id, usr=endpoints.get_current_user())
        return msg.TaskMessage(
            title=t.title,
            time_added=t.time_added,
            usr=t.usr.email(),
            subject=t.subject,
            due_date=datetime.datetime.combine(t.due_date,
                                               datetime.datetime.min.time()),
            cls=t.cls,
            description=t.description,
            school=t.school,
        )
Example #7
def decline_task(task_id, multiple=False):

    # We retrieve the task from the database. We only continue if the task exists in the DB and is not obsolete.
    task = Task.get(task_id)
    if not task:
        return generate_html_reply("The requested task does not exist\n", "404")

    if task["active"] == 0:
        return generate_html_reply("This link is obsolete. The task has already been submitted\n", "410")

    # Send the cancellation to Abiquo
    Task.cancel(task)

    # Disable the task in the database, so later calls to the link will find the task obsolete
    Task.disable(task_id)

    if not multiple:
        notifier.notify_answered_tasks([task['taskid']], False)

    return generate_html_reply("Task declined. The task is now being canceled\n", "200")
Example #8
def multiple_update(task_ids, action):
    all_tasks_id = task_ids.split(",")

    all_tasks_id_confirmed = []
    for task_to_confirm in all_tasks_id:
        task = Task.get(task_to_confirm)
        if task["active"] == 1:
            all_tasks_id_confirmed.append(task["taskid"])

    if not all_tasks_id_confirmed:
        return generate_html_reply("All tasks have already been decided", "410")

    for task_id in all_tasks_id_confirmed:
        if action == "cancel":
            decline_task(task_id, True)
        elif action == "accept":
            accept_task(task_id, True)

    notifier.notify_answered_tasks(all_tasks_id_confirmed, action)

    return generate_html_reply("The tasks " + task_ids + " are now being processed", "200")
Example #9
def train(config: settings.Config):
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                        level=logging.INFO,
                        handlers=[
                            logging.FileHandler(config.log_output),
                            logging.StreamHandler()
                        ])

    task_handler = task.get(config)

    training_data = task_handler.train

    for epoch in range(config.epochs):
        logging.info('[+] start epoch {}'.format(epoch))
        model = task_handler.build_model(epoch)
        history = model.fit_generator(
            training_data,
            task_handler.training_step,
            epochs=epoch + 1,
            initial_epoch=epoch,
            verbose=1 if config.debug and not config.background else 2)
        utils.logging_history(history)
        if hasattr(task_handler, 'callback'):
            task_handler.callback(epoch)
        try:
            evaluations = model.evaluate_generator(
                task_handler.valid,
                task_handler.validation_step,
                verbose=1 if config.debug and not config.background else 2)
            utils.logging_evaluation(
                dict(zip(model.metrics_names, evaluations)))
        except:
            pass
        if hasattr(task_handler, 'callback_valid'):
            task_handler.callback_valid(epoch)
        logging.info('[-] finish epoch {}'.format(epoch))

    task_handler.save_model()

    return 0
Example #10
def notify_answered_tasks(task_ids, action):

    tasks = []
    for task_id in task_ids:
        tasks.append(Task.get(task_id))

    first_task = tasks[0]

    # Prepare the mail properties
    from_addr = config.get('mail', 'from')
    subject = config.get('mail', 'requester_subject')
    to_addr = [api.get_single_user_email(first_task["rel_user"])]
    template = config.get('mail', 'requester_template')

    # Get data from the user who generated the task
    userStr = api.get_name_user(first_task["rel_user"])

    # Prepare task details
    taskType = first_task['type']

    # Get data from the affected virtual machine
    vm = api.get_virtualmachine_details(first_task["rel_target"])

    # Build the HTML for the virtual machine(s)
    vmhtmlbody = ""
    for task in tasks:
        vmhtmlbody = vmhtmlbody + build_html_virtualmachine_template_answer(task, action)

    # Prepare the data for the template and load the template with the overridden values
    template_file = config.get('mail', 'requester_template')
    with open(template_file) as f:
        template = f.read()

    valueDict = {'taskType': taskType, 'vmRows': vmhtmlbody}
    htmlbody = string.Template(template).substitute(valueDict)

    send_email(from_addr, to_addr, subject, htmlbody)
Example #11
def cook(config: settings.Config):
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                        level=logging.INFO,
                        handlers=[
                            logging.FileHandler(config.log_output),
                            logging.StreamHandler()
                        ])

    task_handler = task.get(config)

    for epoch in range(config.epochs):
        logging.info('[+] start epoch {}'.format(epoch))
        model = task_handler.build_model(epoch)
        if config.use_generator:
            history = model.fit_generator(
                task_handler.train(),
                task_handler.training_step,
                epochs=epoch + 1,
                initial_epoch=epoch,
                verbose=1 if config.debug and not config.background else 2)
        else:
            history = model.fit(
                *task_handler.train(),
                config.batch_size,
                epochs=epoch + 1,
                initial_epoch=epoch,
                shuffle=True,
                verbose=1 if config.debug and not config.background else 2)
        utils.logging_history(history)
        if hasattr(task_handler, 'callback'):
            task_handler.callback(epoch)
        try:
            if config.use_generator:
                evaluations = model.evaluate_generator(
                    task_handler.valid(),
                    steps=task_handler.validation_step,
                    verbose=1 if config.debug and not config.background else 2)
            else:
                evaluations = task_handler.test_model.evaluate(
                    *task_handler.valid(),
                    config.batch_size,
                    verbose=1 if config.debug and not config.background else 0)
            utils.logging_evaluation(
                dict(zip(task_handler.test_model.metrics_names, evaluations)))
        except Exception as e:
            print(e)
        if hasattr(task_handler, 'callback_valid'):
            task_handler.callback_valid(epoch)
        logging.info('[-] finish epoch {}'.format(epoch))

    if config.use_generator:
        users_ = []
        imprs_ = []
        mask_ = []
        y_true_ = []
        y_pred_ = []
        for feature, [users, imprs, mask, y_true] in task_handler.test():
            users_.append(users)
            imprs_.append(imprs)
            mask_.append(mask)
            y_true_.append(y_true)
            y_pred_.append(
                task_handler.test_model.predict_on_batch(feature).reshape(
                    (-1, )))
        users = np.hstack(users_)
        imprs = np.hstack(imprs_)
        mask = np.hstack(mask_)
        y_true = np.hstack(y_true_)
        y_pred = np.hstack(y_pred_)
    else:
        feature, [users, imprs, mask, y_true] = task_handler.test()
        y_pred = task_handler.test_model.predict(
            feature,
            batch_size=config.batch_size,
            verbose=1
            if config.debug and not config.background else 0).reshape((-1, ))

    class Result:
        def __init__(self, auc, mrr, ndcgv, ndcgx, pos, size, idx):
            self.auc = auc
            self.mrr = mrr
            self.ndcgv = ndcgv
            self.ndcgx = ndcgx
            self.pos = pos
            self.size = size
            self.idx = idx

        @property
        def result(self):
            return dict(auc=self.auc,
                        ndcgx=self.ndcgx,
                        ndcgv=self.ndcgv,
                        mrr=self.mrr)

        @property
        def info(self):
            return dict(pos=self.pos, size=self.size, num=self.idx * 2 + 1)

    def average(results):
        return Result(np.mean([result.auc for result in results]),
                      np.mean([result.mrr for result in results]),
                      np.mean([result.ndcgv for result in results]),
                      np.mean([result.ndcgx for result in results]),
                      np.mean([result.pos for result in results]),
                      np.mean([result.size for result in results]),
                      np.mean([result.idx for result in results]))

    present_user = users[0]
    present_impr = imprs[0]
    index = 0
    impr_index = 0
    user_results = []
    impr_results = []
    iv_user_results = []
    oov_user_results = []
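    # The loop below walks predictions grouped by (user, impression): whenever the
    # impression id changes, the finished slice is scored (AUC, MRR, nDCG@5/10) as
    # one impression; whenever the user changes, that user's impression results are
    # averaged and bucketed as in-vocabulary or out-of-vocabulary via the mask.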
    for i, user, impr in zip(range(1, len(y_pred)), users[1:], imprs[1:]):
        if user != present_user or impr != present_impr:
            try:
                impr_results.append(
                    Result(
                        task.roc_auc_score(y_true[index:i], y_pred[index:i]),
                        utils.mrr_score(y_true[index:i], y_pred[index:i]),
                        utils.ndcg_score(y_true[index:i], y_pred[index:i], 5),
                        utils.ndcg_score(y_true[index:i], y_pred[index:i], 10),
                        sum(y_true[index:i]), i - index, len(impr_results)))
                index = i
                present_impr = impr
            except Exception as e:
                utils.interactive_console(locals())
        if user != present_user:
            avg = average(impr_results[impr_index:])
            user_results.append(avg)
            if mask[index] == 1:
                iv_user_results.append(avg)
            elif mask[index] == 0:
                oov_user_results.append(avg)
            impr_index = len(impr_results)
            present_user = user

    user_result = average(user_results)
    impr_result = average(impr_results)
    iv_user_result = average(iv_user_results)
    oov_user_result = average(oov_user_results)

    utils.logging_evaluation(user_result.result)
    utils.logging_evaluation(user_result.info)
    utils.logging_evaluation(impr_result.result)
    utils.logging_evaluation(impr_result.info)
    utils.logging_evaluation(iv_user_result.result)
    utils.logging_evaluation(iv_user_result.info)
    utils.logging_evaluation(oov_user_result.result)
    utils.logging_evaluation(oov_user_result.info)
    return 0
Example #12
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'payment_sys.urls'

WSGI_APPLICATION = 'payment_sys.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': task.get("DBNAME"),
        'USER': '******',
        'PASSWORD': '',
        'HOST': '127.0.0.1',
        'PORT': '',
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/

LANGUAGE_CODE = 'ru-ru'

TIME_ZONE = 'UTC'

USE_I18N = True
Example #13
import random

import eventlet
import task

@task.ify()
def long_action(number, task_id, progress):
    tries = (progress or 0) + 1
    eventlet.sleep(random.randint(1, 10) / 10)
    if random.randint(1, 10) > 3:
        print("Action %s Failed" % number)
        raise task.Failure(tries)
    print("Action %s Succeeded after %s tries" % (number, tries))
    return tries

task.setup_db('sqlite://')  # in-memory DB

task_ids = []
for i in range(10):
    task_ids.append(long_action(i))

while not all(task.is_complete(task_id) for task_id in task_ids):
    task_id = task.claim()
    if task_id:
        eventlet.spawn_n(task.run, task_id)
    eventlet.sleep(0)

for task_id in task_ids:
    print(task.get(task_id)['attempts'])

Example #14
#!/usr/bin/env python
import os
import sys

os.sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import task
import lib.conn as conn

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "office_sys.settings")

    flag = False
    if sys.argv[1] == 'runserver' and len(sys.argv) > 3:
        os.environ.setdefault("NODE_IND", sys.argv.pop(3))
        try:
            rpc_srv = conn.TimeoutServerProxy(task.get("URL", 'center') + 'xmlrpc/', timeout=2)
            flag = rpc_srv.register_office(task.get("NAME"), task.get("URL"), task.get("EMAIL"), task.get('BILL'))

        except Exception as exp:
            print("Error connecting with center: " + str(exp))

    else:
        os.environ.setdefault("NODE_IND", "office1")
        flag = True

    if flag:
        from django.core.management import execute_from_command_line

        execute_from_command_line(sys.argv)
Example #15
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os

os.sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import task

PAYSYS_CLIENT_ID = 'W91U15BX'
PAYSYS_TRANSFER_PAGE = task.get('B_URL', 'payment') + 'tranfer/?'
PAYSYS_XMLRPC_PAGE = task.get('URL', 'payment') + 'xmlrpc/'

EMAIL = task.get('EMAIL')
PASSWORD = task.get('PASS')

BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ph4ysr7%mm62nfmgy1&i!u=9^##&%-*w!9guvb4)*de1#v2ur3'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []


# Application definition
Example #16
def task():
    workflow = Workflows(WORKNUM, 'R', 'THREAD')
    workflow.start()
    last_stat = datetime.datetime.now()
    local_spider = {}
    while True:
        for task in schedule():
            module_name = task['filepath'].replace('.py', '').replace('/', '.')
            task['update_time'] = datetime.datetime.strptime(task['update_time'], '%Y-%m-%d %H:%M:%S')
            cls_name = task['article']
            module = __import__(module_name, fromlist=['task.%s' % task['unit']])
            cls = getattr(module, cls_name)
            if task.get('type', 'FOREVER') == 'FOREVER':
                spider = local_spider.get(cls_name, None)
                if spider is None:
                    spider = cls(worknum=20, queuetype='R', worktype='THREAD', tid=int(task['_id']))
                    local_spider[cls_name] = spider
            else:
                spider = cls(worknum=task['worknum'], queuetype=task['queuetype'], worktype=task['worktype'], tid=int(task['_id']))
            try:
                changestate(task['_id'], 2)
                step = task.get('step', 1) - 1
                additions = {}
                additions['name'] = task['name']
                additions['cat'] = task['category'].split(',')
                additions['tag'] = task['tag'].split(',')
                additions = dict(json.loads(task['additions']), **additions)
                if task.get('type', 'FOREVER') == 'FOREVER':
                    if ((datetime.datetime.now() - task['update_time']).seconds)/3600 < task.get('period', 12):
                        continue
                    weight = spider.weight(task['flow'], once=True)
                    section = spider.section(task['flow'], step)
                    if task['params'] is None or task['params'].strip() == '':
                        workflow.task(weight, section, task['_id'], **{'additions':additions})
                    elif task['params'].startswith('{'):
                        workflow.task(weight, section, task['_id'], **dict(json.loads(task['params']), **{'additions':additions}))
                    elif task['params'].startswith('('):
                        workflow.task(weight, section, task['_id'], *tuple(task['params'][1:-1].split(',')), **{'additions':additions})
                    else:
                        if task['index'] is None or task['index'].isdigit():
                            workflow.task(weight, section, task['_id'], task['params'], **{'additions':additions})
                        else:
                            workflow.task(weight, section, task['_id'], **{task['index']:task['params'], 'additions':additions})
                else:
                    if task['params'] is None or task['params'].strip() == '':
                        spider.fetchDatas(task['flow'], step, **{'additions':additions})
                    elif task['params'].startswith('{'):
                        spider.fetchDatas(task['flow'], step, **dict(json.loads(task['params']), **{'additions':additions}))
                    elif task['params'].startswith('('):
                        spider.fetchDatas(task['flow'], step, *tuple(task['params'][1:-1].split(',')), **{'additions':additions})
                    else:
                        if task['index'] is None or task['index'].isdigit():
                            spider.fetchDatas(task['flow'], step, task['params'], **{'additions':additions})
                        else:
                            spider.fetchDatas(task['flow'], step, **{task['index']:task['params'], 'additions':additions})
                    spider.statistic()
                    changestate(task['_id'], 0)
                    if task.get('push_url') is not None:
                        requPost(task['push_url'], {'type':'video', 'tid':task['_id']})
            except:
                t, v, b = sys.exc_info()
                err_messages = traceback.format_exception(t, v, b)
                extra = ','.join(err_messages)
                print(extra)
                changestate(task['_id'], 3, extra=extra)
            else:
                if not task.get('type', 'FOREVER') == 'FOREVER':
                    stat(task, spider)
            finally:
                if ((datetime.datetime.now() - last_stat).seconds) >= LIMIT:
                    last_stat = datetime.datetime.now()
                    for spider in local_spider.values():
                        spider.statistic()
                        stat(task, spider, last_stat)
                    
        time.sleep(60)
Example #17
import settings
import task
import tensorflow as tf
import utils

_config = {}
_config['input_training_data_path'] = '../data'
_config['input_validation_data_path'] = '../data'
_config['input_previous_model_path'] = '../models'
_config['output_model_path'] = '../models'
_config['log_dir'] = '../logs'
_config['node_count'] = 1
_config['node_list_path'] = ''
_config['node_id'] = 1
_config['node_name'] = ''
settings.public_config = _config
tf.gfile.MkDir(_config['output_model_path'])

config = {
    'task': 'TestPipelineBody',
    'name': '',
    'batch_size': 256,
    'pipeline_input': '../../pipeline-2018-12-01'
}

config = settings.Config(config)

task_handler = task.get(config)
task_handler.load_model()
task_handler.test_doc_vec()
task_handler.test_user_vec()
task_handler.test_user_doc_score()
task_handler.test_correct()
Example #18
    def test_change_task_name(self):
        task_id = one_name()
        task.run(task_id)
        self.assertTrue(task.exists(task_id))
        self.assertTrue(task.is_complete(task_id))
        self.assertEqual(task.get(task_id)['task_name'], 'another_name')
Example #19
def run():
    workflow = Workflows(WORKNUM, 'M', 'THREAD', settings=WORKQUEUE)
    workflow.start()
    last_stat = datetime.datetime.now()
    local_spider = {}
    while True:
        for task in schedule():
            module_name = task['filepath'].replace('.py', '').replace('/', '.')
            task['update_time'] = datetime.datetime.strptime(task['update_time'], '%Y-%m-%d %H:%M:%S')
            cls_name = task['article']
            module = __import__(module_name, fromlist=['task.%s' % task['unit']])
            cls = getattr(module, cls_name)
            spider = local_spider.get(cls_name, None)
            if spider is None:
                callback = functools.partial(push, datamodel=task['datamodel'], url=task['push_url'], tid=int(task['_id']))
                spider = cls(worknum=task['worknum'], queuetype='P', worktype='THREAD', tid=int(task['_id']), settings=WORKQUEUE, callback=callback)
                local_spider[cls_name] = spider
                
            if task.get('type', 'FOREVER') == 'FOREVER' and (datetime.datetime.now() - task['update_time']).total_seconds() < task.get('period', 3600 * 12):
                continue

            if not task.get('type', 'FOREVER') == 'FOREVER' and not task['state'] == 1:
                continue

            try:
                changestate(task['_id'], 2)
                step = task.get('step', 1) - 1
                additions = {}
                additions['name'] = task['name']
                additions['category'] = task['category']
                additions['tag'] = task['tag'].split(',')
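                # Interpret task['params'] as call arguments: a JSON object becomes
                # keyword arguments; if 'index' is absent or numeric, the raw value is
                # passed positionally; otherwise 'index' names the keyword to use.
                # The parsed 'additions' metadata is always merged into the kwargs.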
                if task['params'].startswith('{') and task['params'].endswith('}'):
                    args = []
                    kwargs = json.loads(task['params'])
                elif task['index'] is None or task['index'].isdigit():
                    args = [task['params'], ]
                    kwargs = {}
                else:
                    args = []
                    kwargs = {task['index']:task['params']}
                kwargs['additions'] = dict(json.loads(task['additions']), **additions)

                args.insert(0, datetime.datetime.now().strftime('%Y%m%dT%H:%M'))

                if task.get('type', 'FOREVER') == 'FOREVER':
                    section = spider.select(task['flow'], step)
                    args.insert(0, task['_id'])
                    args.insert(0, section)
                    fun = workflow.task
                    workflow.task(*args, **kwargs)
                else:
                    args.insert(0, step)
                    args.insert(0, task['flow'])
                    threading.Thread(target=spider.fetchDatas, args=args, kwargs=kwargs).start()
            except:
                t, v, b = sys.exc_info()
                err_messages = traceback.format_exception(t, v, b)
                extra = ','.join(err_messages)
                changestate(task['_id'], 3, extra=extra)
                    
        time.sleep(1)