Example #1
import db
import datetime as dt
import settings
import crowd
import math

# logging
import logging
logger = settings.createLog("dynamic_pricer")


def dynamic_pricing():
    logger.info("Start updating bonus")
    logger.debug(len(settings.bonus_matrix))

    session = db.Session()
    for x in range(len(settings.bonus_matrix)):
        logger.debug(x)
        # Each matrix row opens an age bracket: the last row is bounded by the
        # garbage-collection horizon, every other row by the next matrix row.
        if x == len(settings.bonus_matrix) - 1:
            oldest = dt.datetime.now() - dt.timedelta(days=settings.delete_time)
        else:
            oldest = dt.datetime.now() - dt.timedelta(days=settings.bonus_matrix[x + 1][0])
        youngest = dt.datetime.now() - dt.timedelta(days=settings.bonus_matrix[x][0])
        query = (session.query(db.Task)
                 .filter(db.Task.datetime < youngest)
                 .filter(db.Task.datetime > oldest)
                 .filter(db.Task.finished_rating == None)
                 .filter(db.Task.garbage_flag == False))
        for task in query.all():
            # Bonus is a fraction of the price, rounded up to whole cents.
            new_bonus = math.ceil(settings.bonus_matrix[x][1] * task.price * 100) / 100
            if new_bonus != task.price_bonus:
                task.price_bonus = new_bonus
                crowd.set_bonus(task)
                logger.info("-set bonus of task " + str(task.id) + " to " + str(task.price_bonus))
    session.commit()  # persist the updated bonuses
Example #2
# This excerpt uses a Flask app ("application"); the imports below restore
# what the snippet needs to run.
from flask import Flask, render_template
import ago
import db
import settings
import utils

# Schedules jobs (the job functions live in their own modules)
from apscheduler.scheduler import Scheduler
import scraper
import dynamic_pricer
import garbage_collector

application = Flask(__name__)


# Filters for templates
application.jinja_env.filters['humanize_date'] = utils.humanize_date
application.jinja_env.filters['rate_average'] = utils.rate_average
application.jinja_env.filters['ago'] = ago.human

# Logging
import logging
logger = settings.createLog("main_app")


# WEBAPP
###########################################################

@application.route('/', methods=['GET'])
def get_index():
    with db.session_scope() as session:
        num_keywords = session.query(db.Keyword).count()
        num_resolved_tasks = session.query(db.Task).filter(db.Task.finished_rating != None).count()
        return render_template('index.html',
                    num_keywords=num_keywords,
                    num_resolved_tasks=num_resolved_tasks)

@application.route('/search', methods=['GET'])
def get_search():
    pass  # handler body truncated in the source excerpt
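get_index() above relies on a session_scope() helper in db that the excerpt does not show. A minimal sketch, assuming the stock SQLAlchemy context-manager recipe rather than this project's actual code:

# Sketch of db.session_scope(), assuming the standard SQLAlchemy recipe.
from contextlib import contextmanager

@contextmanager
def session_scope():
    session = Session()  # the module's sessionmaker
    try:
        yield session
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()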
Example #3
import settings
import requests
import xml.etree.ElementTree as ET
import dateutil.parser
from lxml import etree
from StringIO import StringIO
import db
import json
import crowd
import re
import math
import logging

logger = settings.createLog("scraper")

TEXT_SIZE = 250


def fetch_rss(url):
    # Return the parsed XML root of the feed, or None if the fetch/parse fails.
    try:
        response = requests.get(url)
        return ET.fromstring(response.text.encode("utf-8"))
    except Exception as e:
        logger.warning("Could not fetch RSS feed " + url + ": " + str(e))
        return None


def parse_date(date_str):
    # Drop the timezone so the result compares with naive datetimes from the DB.
    date = dateutil.parser.parse(date_str)
    return date.replace(tzinfo=None)
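
A usage sketch for the two helpers above; the feed URL and RSS tag names are illustrative, not values from this project:

# Example only: the URL and tag names are invented.
root = fetch_rss("http://example.com/feed.rss")
if root is not None:
    for item in root.iter("item"):
        title = item.findtext("title")
        published = parse_date(item.findtext("pubDate"))
        logger.debug("%s %s" % (title, published))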

Example #4
import settings

# Logging
import logging
logger = settings.createLog("start_scheduler")


from apscheduler.scheduler import Scheduler
import scraper
import dynamic_pricer
import garbage_collector

if __name__ == "__main__": 
    logger.info("startScheduledJobs file")
    sc = Scheduler(standalone=True)
    # Schedule  to be called every hour
    sc.add_interval_job(scraper.scrape, hours=1)
    sc.add_interval_job(dynamic_pricer.dynamic_pricing, hours=1)
    sc.add_interval_job(garbage_collector.garbage_collecting, hours=1)

    print('Press Ctrl+C to exit')
    try:
        sc.start()
    except (KeyboardInterrupt, SystemExit):
        pass
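
add_interval_job() is the APScheduler 2.x API used throughout these examples. Purely as a sketch, the 3.x equivalent of the same schedule would look like this (not used by this project):

# APScheduler 3.x sketch of the same hourly schedule.
from apscheduler.schedulers.blocking import BlockingScheduler

sc = BlockingScheduler()
sc.add_job(scraper.scrape, 'interval', hours=1)
sc.add_job(dynamic_pricer.dynamic_pricing, 'interval', hours=1)
sc.add_job(garbage_collector.garbage_collecting, 'interval', hours=1)
sc.start()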
Example #5
import db
import datetime as dt
import settings
import requests
import crowd


import logging
logger = settings.createLog("garbage_collection")

def garbage_collecting():
    logger.info("Starting garbage collector")

    session = db.Session()
    # Unfinished, not-yet-flagged tasks older than the delete_time horizon.
    query = (session.query(db.Task)
             .filter(db.Task.datetime < (dt.datetime.now() - dt.timedelta(days=settings.delete_time)))
             .filter(db.Task.finished_rating == None)
             .filter(db.Task.garbage_flag == False))
    for task in query.all():
        status = crowd.set_garbage(task)
        if status == requests.codes.ok:
            task.garbage_flag = True
            session.commit()
            logger.info("task " + str(task.id) + " is now garbage")
        else:
            logger.warning("Error: cannot set task " + str(task.id) + " to garbage (Error " + str(status) + ")")
    logger.info("Finished garbage collector")


if __name__ == '__main__':
    garbage_collecting()
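
garbage_collecting() expects crowd.set_garbage(task) to return an HTTP status code. A sketch of that contract, with a hypothetical endpoint since crowd.py is not shown:

# Hypothetical sketch of crowd.set_garbage(); the endpoint URL and the
# settings.crowd_api_url name are invented for illustration.
def set_garbage(task):
    response = requests.post(settings.crowd_api_url + "/tasks/" + str(task.id) + "/garbage")
    return response.status_code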