Example #1
def import_wetmills(task):  #pragma: no cover
    from .models import import_csv_wetmills
    from django.db import transaction

    transaction.enter_transaction_management()
    transaction.managed()

    try:
        task.task_id = import_wetmills.request.id
        task.import_log = "Started import at %s\n" % datetime.now()
        task.save()

        transaction.commit()

        wetmills = import_csv_wetmills(task.country, task.csv_file.file.name,
                                       task.created_by)

        task.log("Import finished at %s\n" % datetime.now())
        task.log("%d wetmill(s) added." % len(wetmills))

        transaction.commit()

    except Exception as e:
        import traceback
        traceback.print_exc()

        task.log("Error: %s" % e)
        transaction.commit()

        raise e

    finally:
        transaction.leave_transaction_management()

    return task
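
Examples #1, #2 and #9 use transaction.enter_transaction_management() and transaction.managed(), which belong to Django's old manual transaction API (deprecated in 1.6, removed in 1.8). A minimal sketch of the same commit-as-you-go flow on modern Django, reusing the task model and import_csv_wetmills helper from Example #1; the function name import_wetmills_atomic is hypothetical:

def import_wetmills_atomic(task):
    # Sketch only: same flow as Example #1, but each transaction.atomic()
    # block commits on success and rolls back on error, replacing the
    # explicit transaction.commit() calls above.
    from datetime import datetime
    from django.db import transaction
    from .models import import_csv_wetmills  # helper assumed from Example #1

    with transaction.atomic():
        task.import_log = "Started import at %s\n" % datetime.now()
        task.save()

    try:
        with transaction.atomic():
            wetmills = import_csv_wetmills(task.country, task.csv_file.file.name,
                                           task.created_by)
            task.log("Import finished at %s\n" % datetime.now())
            task.log("%d wetmill(s) added." % len(wetmills))
    except Exception as e:
        # Record the failure in its own transaction so the message persists.
        with transaction.atomic():
            task.log("Error: %s" % e)
        raise

    return task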
Example #2
def finalize_season(task):  #pragma: no cover
    from .models import SeasonAggregate
    from django.db import transaction

    transaction.enter_transaction_management()
    transaction.managed()

    try:
        task.task_id = finalize_season.request.id
        task.task_log = "Started finalizing season at %s\n" % datetime.now()
        task.save()

        transaction.commit()

        report_count = SeasonAggregate.calculate_for_season(task.season)

        task.log("Season finished at %s\n" % datetime.now())
        task.log("%d finalized reports(s) included in season." % report_count)

        transaction.commit()

    except Exception as e:
        import traceback
        traceback.print_exc()

        task.log("Error: %s" % e)
        transaction.commit()

        raise e

    finally:
        transaction.leave_transaction_management()

    return task
Example #3
def call_connector_task(task_id):
    """ call connector with args.
	"""
    connector = Connector()
    logger.info("call connector with task_id: %s", task_id)

    task = Task.objects.get(id=task_id)
    if task.status == States.DONE:
        return
    if task.status == States.PROCESSING:
        Picture.objects.filter(task_id=task.id).delete()
    else:
        task.status = States.PROCESSING
        task.save()
    try:
        data = connector.process_task(task)
    except Exception as e:
        return call_connector_task.retry(args=(task_id,),
                                         countdown=60,
                                         exc=e,
                                         max_retries=2)
    for pic in data:
        p = Picture(link=pic["link"],
                    pic_id=pic["id"],
                    created_date=pic["date"],
                    task=task)
        p.save()

    task.status = States.DONE
    task.save()
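
Example #3 (and its duplicate in Example #10) originally passed exec=e and a one-element non-tuple args=(task_id) to retry(); the keyword Celery's Task.retry() accepts is exc, and the idiomatic form is a bound task calling self.retry(). A minimal sketch under that assumption, where app is an assumed Celery() instance and Connector/Task/States/Picture are the models used above:

# Sketch of the same retry flow as a bound Celery task.
@app.task(bind=True, max_retries=2, default_retry_delay=60)
def call_connector_task(self, task_id):
    task = Task.objects.get(id=task_id)
    if task.status == States.DONE:
        return
    try:
        data = Connector().process_task(task)
    except Exception as e:
        # self.retry() raises a Retry exception, so the worker reschedules
        # this task with the same arguments after the countdown.
        raise self.retry(exc=e)
    for pic in data:
        Picture.objects.create(link=pic["link"], pic_id=pic["id"],
                               created_date=pic["date"], task=task)
    task.status = States.DONE
    task.save()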
Example #4
def calculateTask(task, sid, model):
    try:
        generate_mol_image(task)
        suite = SuiteTask.objects.get(sid=sid)
        map_model_name = get_model_name(model['model'])
        smile = task.file_obj.smiles.encode(
            'utf-8') if task.file_obj.file_type != 'mol' else ''

        # Only one of smile / mol_fpath is used as input (smile takes priority)
        mol_fpath = os.path.join(
            settings.SETTINGS_ROOT,
            task.file_obj.file_obj.path) if not smile else None

        try:
            temperature = float(model.get('temperature'))
        except (TypeError, ValueError):
            # Missing or non-numeric temperature falls back to the default.
            temperature = DEFAULT_TEMPERATURE_ARGS

        chemistry_logger.info(
            'PredictionModel calculating: model name(%s),'
            'smile(%s) mol path(%s) temperature(%s)', map_model_name, smile,
            mol_fpath, temperature)
        # Entry point for the backend prediction model calculation
        predict_results = prediction_model_calculate(map_model_name, smile,
                                                     mol_fpath, temperature)

        if task.file_obj.file_type == 'mol':
            name = os.path.basename(mol_fpath).split('.')[0]
        else:
            name = smile
        result = predict_results[name][map_model_name]
        chemistry_logger.info('[task]result %s' % result)
    except KeyError:
        chemistry_logger.exception('still cannot support this model')
        result = None
        task.result_state = "We don't support this model now"
        task.status = StatusCategory.objects.get(category=STATUS_FAILED)
        suite.status_id = StatusCategory.objects.get(category=STATUS_FAILED)
    except Exception as e:
        chemistry_logger.exception('failed to submit task to prediction model')
        result = None
        task.result_state = str(e)
        task.status = StatusCategory.objects.get(category=STATUS_FAILED)
        suite.status_id = StatusCategory.objects.get(category=STATUS_FAILED)
    else:
        chemistry_logger.info("calculate Successfully in celery queue!")
        task.result_state = "Calculate Successfully!"
        task.status = StatusCategory.objects.get(category=STATUS_SUCCESS)
        suite.status_id = StatusCategory.objects.get(category=STATUS_WORKING)

    task.end_time = utils.get_real_now()
    task.results = json.dumps(result)

    suite.save()
    task.save()

    add_counter(suite.sid)

    return result
Example #5
def calculateTask(task, sid, model):
    try:
        generate_mol_image(task)
        suite = SuiteTask.objects.get(sid=sid)
        map_model_name = get_model_name(model['model'])
        smile = task.file_obj.smiles.encode('utf-8') if task.file_obj.file_type != 'mol' else ''

        # Only one of smile / mol_fpath is used as input (smile takes priority)
        mol_fpath = os.path.join(settings.SETTINGS_ROOT, task.file_obj.file_obj.path) if not smile else None

        try:
            temperature = float(model.get('temperature'))
        except (TypeError, ValueError):
            # Missing or non-numeric temperature falls back to the default.
            temperature = DEFAULT_TEMPERATURE_ARGS

        chemistry_logger.info('PredictionModel calculating: model name(%s),'
                              'smile(%s) mol path(%s) temperature(%s)',
                              map_model_name, smile, mol_fpath, temperature)
        # Entry point for the backend prediction model calculation
        predict_results = prediction_model_calculate(map_model_name, smile,
                                                     mol_fpath, temperature)

        if task.file_obj.file_type == 'mol':
            name = os.path.basename(mol_fpath).split('.')[0]
        else:
            name = smile
        result = predict_results[name][map_model_name]
        chemistry_logger.info('[task]result %s' % result)
    except KeyError:
        chemistry_logger.exception('still cannot support this model')
        result = None
        task.result_state = "We don't support this model now"
        task.status = StatusCategory.objects.get(category=STATUS_FAILED)
        suite.status_id = StatusCategory.objects.get(category=STATUS_FAILED)
    except Exception as e:
        chemistry_logger.exception('failed to submit task to prediction model')
        result = None
        task.result_state = str(e)
        task.status = StatusCategory.objects.get(category=STATUS_FAILED)
        suite.status_id = StatusCategory.objects.get(category=STATUS_FAILED)
    else:
        chemistry_logger.info("calculate Successfully in celery queue!")
        task.result_state = "Calculate Successfully!"
        task.status = StatusCategory.objects.get(category=STATUS_SUCCESS)
        suite.status_id = StatusCategory.objects.get(category=STATUS_WORKING)

    task.end_time = utils.get_real_now()
    task.results = json.dumps(result)

    suite.save()
    task.save()

    add_counter(suite.sid)

    return result
Example #6
def import_hashfile_task(hashfile_id):

    hashfile = Hashfile.objects.get(id=hashfile_id)

    task = Task(
        time = datetime.datetime.now(),
        message = "Importing hash file %s..." % hashfile.name
    )
    task.save()

    try:

        if hashfile.hash_type != -1: # if != plaintext
            task.message = "Importing hash file %s..." % hashfile.name
            task.save()

            Hashcat.insert_hashes(hashfile)

            task.message = "Comparing hash file %s to potfile..." % hashfile.name
            task.save()

            Hashcat.compare_potfile(hashfile)
        else:
            task.message = "Importing plaintext file %s..." % hashfile.name
            task.save()

            Hashcat.insert_plaintext(hashfile)
    except Exception as e:
        traceback.print_exc()
    finally:
        task.delete()
Example #7
def remove_hashfile_task(hashfile_id):

    hashfile = Hashfile.objects.get(id=hashfile_id)

    task = Task(time=datetime.datetime.now(),
                message="Removing hash file %s..." % hashfile.name)
    task.save()

    try:
        Hashcat.remove_hashfile(hashfile)
    except Exception as e:
        traceback.print_exc()
    finally:
        task.delete()
Example #8
def save_record(f, model, sid, source_type, smile=None, local_search_id=None):
    from chemistry.tasks import calculateTask, DEFAULT_TEMPERATURE_ARGS
    task = SingleTask()
    task.start_time = utils.get_real_now() 
    task.sid = SuiteTask.objects.get(sid=sid)
    task.pid = str(uuid.uuid4())
    task.model = ModelCategory.objects.get(category=model['model'])

    temperature = model.get('temperature')
    task.temperature = float(temperature) if temperature else DEFAULT_TEMPERATURE_ARGS

    if source_type == ORIGIN_UPLOAD:
        # here, f is ProcessedFile record instance
        f.file_source = FileSourceCategory.objects.get(category=source_type)
        f.file_type = "mol"
        task.file_obj = f
        f.save()
    elif source_type in (ORIGIN_SMILE, ORIGIN_DRAW):
        # here, f is a file path
        processed_f = ProcessedFile()
        obj = File(open(f, "r"))
        processed_f.title = os.path.basename(obj.name)
        processed_f.file_type = source_type
        processed_f.file_source = FileSourceCategory.objects.get(category=source_type)
        processed_f.file_obj = obj
        if smile:
            processed_f.smiles = smile

        if source_type == ORIGIN_SMILE and local_search_id is not None:
            processed_f.local_search_id = int(local_search_id)

        processed_f.save()
        task.file_obj = processed_f
        obj.close()
    else:
        raise ErrorCalculateType('Cannot recognize this source type')

    task.status = StatusCategory.objects.get(category=STATUS_WORKING)
    task.save()

    chemistry_logger.info('~~~~~~~~ t:%s' % task.start_time)

    calculateTask.delay(task, sid, model)
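
calculateTask.delay(task, sid, model) serializes the whole SingleTask instance (and anything attached to it) into the broker message, which requires it to be picklable and can go stale before the worker runs. The commonly recommended alternative is to enqueue only the primary key; a sketch with a hypothetical pk-based wrapper (app is an assumed Celery instance):

# Sketch: enqueue only the primary key and let the worker re-fetch the row.
calculateTask_by_pk.delay(task.pk, sid, model)

# in chemistry/tasks.py (hypothetical variant of calculateTask above)
@app.task
def calculateTask_by_pk(task_pk, sid, model):
    task = SingleTask.objects.get(pk=task_pk)
    return calculateTask(task, sid, model)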
Example #9
def import_scorecards(task):  #pragma: no cover
    from .models import import_season_scorecards
    from django.db import transaction

    transaction.enter_transaction_management()
    transaction.managed()

    log = StringIO.StringIO()

    try:
        task.task_id = import_scorecards.request.id
        task.import_log = "Started import at %s\n\n" % datetime.now()
        task.save()

        transaction.commit()

        reports = import_season_scorecards(task.season,
                                           task.csv_file.file.name,
                                           task.created_by, log)

        task.log(log.getvalue())
        task.log("Import finished at %s\n" % datetime.now())
        task.log("%d scorecard(s) added." % len(reports))

        transaction.commit()

    except Exception as e:
        import traceback
        traceback.print_exc()

        task.log("\nError: %s\n" % e)
        task.log(log.getvalue())
        transaction.commit()

        raise e

    finally:
        transaction.leave_transaction_management()

    return task
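
Example #9 builds its log buffer with the Python 2 StringIO.StringIO; on Python 3 the equivalent in-memory text stream lives in the io module:

# Python 3 equivalent of the log buffer used in Example #9.
import io

log = io.StringIO()
log.write("example log line\n")
print(log.getvalue())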
Example #10
def call_connector_task(task_id):
    """Call the connector for the given task id."""
    connector = Connector()
    logger.info("call connector with task_id: %s", task_id)

    task = Task.objects.get(id=task_id)
    if task.status == States.DONE:
        return
    if task.status == States.PROCESSING:
        Picture.objects.filter(task_id=task.id).delete()
    else:
        task.status = States.PROCESSING
        task.save()
    try:
        data = connector.process_task(task)
    except Exception as e:
        return call_connector_task.retry(args=(task_id,),
                                         countdown=60,
                                         exc=e,
                                         max_retries=2)
    for pic in data:
        p = Picture(link=pic["link"],
                    pic_id=pic["id"],
                    created_date=pic["date"],
                    task=task)
        p.save()

    task.status = States.DONE
    task.save()
Example #11
def run_recurring_tasks():

    print("Looking for tasks...")

    now = datetime.datetime.now()

    tasks = RecurringTask.objects.filter(
        is_active=True, is_running=False).select_related("parent_job")
    active_tasks = []
    for task in tasks:
        if task.period_unit == "minutes":
            if task.last_started_on is None or task.last_started_on < now - relativedelta(minutes=task.period):
                active_tasks.append(task)
                task.is_running = True
                task.save()

        elif task.period_unit == "hours":
            if task.last_started_on is None or task.last_started_on < now - relativedelta(hours=task.period):
                active_tasks.append(task)
                task.is_running = True
                task.save()

        elif task.period_unit == "days":
            if task.last_started_on is None or task.last_started_on < now - relativedelta(days=task.period):
                active_tasks.append(task)
                task.is_running = True
                task.save()
                
    for r in active_tasks:
        job = r.parent_job

        r.last_started_on = datetime.datetime.now()
        r.save()

        task = JobTask(parent_recurring_task=r)
        task.save()

        run_batch_job.delay(task)
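
The three period_unit branches above differ only in the keyword passed to relativedelta, and "minutes", "hours" and "days" are already valid relativedelta keyword arguments. A sketch of the collapsed check, assuming those are the only allowed unit values (task_is_due is a hypothetical helper name):

from dateutil.relativedelta import relativedelta

def task_is_due(task, now):
    # "minutes" / "hours" / "days" map directly onto relativedelta kwargs,
    # so one expression replaces the three if/elif branches above.
    delta = relativedelta(**{task.period_unit: task.period})
    return task.last_started_on is None or task.last_started_on < now - delta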
Example #12
def run_search_task(search_id):

    search = Search.objects.get(id=search_id)

    task = Task(
        time = datetime.datetime.now(),
        message = "Running search %s..." % search.name
    )
    task.save()

    if os.path.exists(search.output_file):
        os.remove(search.output_file)

    try:
        search.status = "Running"
        search.output_lines = None
        search.processing_time = None
        search.save()
        search_info = json.loads(search.json_search_info)

        start_time = time.time()

        cursor = connection.cursor()

        args = []
        columns = ["hashfile_id", "username", "password", "hash_type", "hash"]

        query = "SELECT %s FROM Hashcat_hash" % ",".join(columns)

        if "pattern" in search_info or not "all_hashfiles" in search_info or "ignore_uncracked" in search_info:
            query += " WHERE "

        if "pattern" in search_info:
            query_pattern_list = []
            for pattern in search_info["pattern"].split(';'):
                query_pattern_list.append("username LIKE %s")
                args.append("%" + pattern + "%")

            query += "(" + " OR ".join(query_pattern_list) + ")"

            if not "all_hashfiles" in search_info or "ignore_uncracked" in search_info:
                query += " AND "

        if not "all_hashfiles" in search_info:
            query += "hashfile_id IN (%s)" % ','.join(['%s'] * len(search_info["hashfiles"]))
            args += [int(i) for i in search_info["hashfiles"]]

            if "ignore_uncracked" in search_info:
                query += " AND "

        if "ignore_uncracked" in search_info:
            query += "password IS NOT NULL"


        tmpfile_name = ''.join([random.choice(string.ascii_lowercase) for i in range(16)])
        tmp_file = os.path.join(os.path.dirname(__file__), "..", "Files", "tmp", tmpfile_name)
        query += " INTO OUTFILE %s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n'"
        args.append(tmp_file)

        rows = cursor.execute(query, args)
        cursor.close()

        if os.path.exists(tmp_file):
            hash_types_dict = Hashcat.get_hash_types()
            hashfile_dict = {}
            for hashfile in Hashfile.objects.all():
                hashfile_dict[hashfile.id] = hashfile.name

            with open(search.output_file, 'w', newline='') as out_csvfile:
                spamwriter = csv.writer(out_csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
                with open(tmp_file, 'r', newline='') as csvfile:
                    spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
                    for row in spamreader:
                        try:
                            row[0] = hashfile_dict[int(row[0])]
                        except KeyError:
                            pass
                        try:
                            row[3] = hash_types_dict[int(row[3])]['name'] if int(row[3]) != -1 else "Plaintext"
                        except KeyError:
                            pass
                        except ValueError:
                            pass
                        except IndexError:
                            pass
                        spamwriter.writerow(row)

            os.remove(tmp_file)

        end_time = time.time()

        search.status = "Done"
        search.output_lines = int(rows)
        search.processing_time = int(end_time - start_time)
        search.save()

    except Exception as e:
        traceback.print_exc()
    finally:
        task.delete()
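
SELECT ... INTO OUTFILE writes the result on the database server's filesystem and needs the MySQL FILE privilege, which is why Example #12 re-reads a temp file to rewrite it. One portable alternative is to stream the rows through the Django cursor into the CSV directly; a sketch assuming the same query (without the OUTFILE clause) and the same lookup dictionaries, with the hypothetical name export_search_results:

def export_search_results(search, query, args, hashfile_dict, hash_types_dict):
    # Sketch: same transformation as Example #12, but iterating the cursor
    # instead of using MySQL's INTO OUTFILE (no FILE privilege, no temp file).
    import csv
    from django.db import connection

    rows_written = 0
    with connection.cursor() as cursor:
        cursor.execute(query, args)
        with open(search.output_file, 'w', newline='') as out_csvfile:
            writer = csv.writer(out_csvfile, quoting=csv.QUOTE_MINIMAL)
            for hashfile_id, username, password, hash_type, hash_value in cursor.fetchall():
                name = hashfile_dict.get(hashfile_id, hashfile_id)
                type_name = ("Plaintext" if hash_type == -1
                             else hash_types_dict.get(hash_type, {}).get('name', hash_type))
                writer.writerow([name, username, password, type_name, hash_value])
                rows_written += 1
    return rows_written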
Example #13
    # Fragment: likely the tail of the batch-job task enqueued in Example #11;
    # it records the run's outcome and emails a summary to the job owner and admins.
    try:
        task.num_rows, task.success_count, task.error_count = form.run(task, rows)
    except Exception as e:
        message = traceback.format_exc()
        subject = "[ActionKit Data Manager] Task %s (%s) failed :-(" % (task.id, name)
    else:
        message = "Num rows: %s.  Success count: %s.  Error count: %s." % (
            task.num_rows, task.success_count, task.error_count)
        if task.error_count:
            subject = "[ActionKit Data Manager] Task %s (%s) completed with errors =/" % (task.id, name)
        else:
            subject = "[ActionKit Data Manager] Task %s (%s) succeeded =)" % (task.id, name)
    message += "\n\nCheck it out here: http://%s/admin/main/jobtask/%s/" % (settings.SITE_DOMAIN, task.id)
    message += "\nThe job configuration is here: http://%s/admin/main/batchjob/%s/" % (settings.SITE_DOMAIN, job.id)

    task.completed_on = datetime.datetime.now()
    task.save()

    if recurrence is not None:
        recurrence.is_running = False
        recurrence.save()

    num = send_mail(subject, message, settings.DEFAULT_FROM_EMAIL,
                    [job.created_by.email] + [i[1] for i in settings.ADMINS],
                    fail_silently=False)

    print("Sent %s mails with subject %s; job %s completed; %s rows" % (num, subject, job.id, task.num_rows))