Example #1
0
def controller():
    """
    Connect all the components together and press the big red button.

    Wires up configuration, logging, the task scheduler, the result
    manager and the web front-end, then starts the cherrypy engine and
    blocks until shutdown.
    """
    multilogging()
    config = DASAnalyticsConfig()
    config.configure()
    logconf = DASAnalyticsLogging(config)

    scheduler = TaskScheduler(config, cherrypy.engine)
    scheduler.subscribe()
    results = ResultManager(config)
    web = AnalyticsWeb(config, scheduler, results)
    logconf.logger.info("Analytics starting")
    # ResultManager doubles as a logging handler so log records are
    # captured alongside task results.
    logconf.add_handler(results)
    scheduler.add_callback(results.receive_task_result)

    # Fetch the task list once instead of calling config.get_tasks()
    # three separate times.
    tasks = config.get_tasks()
    if tasks:
        logconf.logger.info("Adding %d tasks", len(tasks))
        for task in tasks:
            # Stagger task start times randomly over one interval unless
            # the configuration asks for immediate starts.
            if config.no_start_offset:
                offset = 0
            else:
                offset = random.random() * task.interval
            scheduler.add_task(task, offset=offset)

    cherrypy.config["engine.autoreload_on"] = False
    cherrypy.config["server.socket_port"] = config.web_port

    cherrypy.tree.mount(web, config.web_base)

    # Write a PID file so the running daemon can be managed externally.
    pid = cherrypy.process.plugins.PIDFile(cherrypy.engine, config.pid)
    pid.subscribe()

    logconf.logger.info("Starting cherrypy")
    cherrypy.engine.start()
    cherrypy.engine.block()
    logconf.logger.info("Stopping cherrypy")
Example #2
0
File: results.py  Project: ktf/DAS
DAS analytics results module
"""

# system modules
import re
import logging

# DAS modules
from pymongo import DESCENDING
from pymongo.errors import InvalidName
from bson.errors import InvalidDocument, InvalidStringData
from DAS.utils.das_db import db_connection
from DAS.analytics.config import DASAnalyticsConfig

# Result-database options, registered on the shared config object at
# import time.  NOTE(review): ``basestring`` implies this module targets
# Python 2.
DASAnalyticsConfig.add_option("db_uri", type=basestring,
      default="mongodb://localhost:27017/",
      help="MongoDB URI")
DASAnalyticsConfig.add_option("db_name", type=basestring,
      default="analytics_results",
      help="Name of MongoDB database")
DASAnalyticsConfig.add_option("db_coll", type=basestring,
      default="db", help="Name of MongoDB collection")
# db_size caps the analytics collection (64 MiB default) — presumably
# used to create a capped collection; verify against the db setup code.
DASAnalyticsConfig.add_option("db_size", type=int,
      default=64*1024*1024,
      help="Maximum size of analytics internal DB")

class ResultManager(logging.Handler):
    """
    Class that receives and provides access to task results and
    logging information, and sorts these for web access.
Example #3
0
File: scheduler.py  Project: ktf/DAS
        # some redundancy here, but just trapping
        # KeyboardInterrupt didn't seem to work
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        try:
            return func(*args, **kwargs)
        except KeyboardInterrupt:
            #raise Exception("KeyboardInterrupt as Exception")
            return
    return wrapper

# Monkey-patch the multiprocessing pool worker entry point so every pool
# worker runs wrapped by poolsafe (defined above), which makes workers
# ignore SIGINT instead of dying on Ctrl-C.
# NOTE(review): assumes ``multiprocessing`` (and ``multiprocessing.pool``)
# is imported earlier in this file — confirm, otherwise the assignment
# below raises NameError at import time.
from multiprocessing.pool import worker
multiprocessing.pool.worker = poolsafe(worker)

# Scheduler tuning knobs, registered on the shared config object at
# import time.  The backslash inside the help string below is a
# line-continuation within the literal, not a newline in the text.
DASAnalyticsConfig.add_option("max_retries", type=int, default=1,
    help="Number of times the task scheduler will \
retry a failed task before abandoning it.")
DASAnalyticsConfig.add_option("retry_delay", type=int, default=60,
    help="Seconds to wait before retrying a failed job.")
DASAnalyticsConfig.add_option("minimum_interval", type=int, default=60,
    help="Minimum repeat interval allowed for a task.")
# Size of the multiprocessing worker pool used to run tasks.
DASAnalyticsConfig.add_option("workers", type=int, default=4,
    help="Number of workers to use for tasks.")
class TaskScheduler(plugins.SimplePlugin):
    """
    This is a cron-like thread which keeps track of all the tasks to
    be run, and dispatches them to the worker pool at the appropriate
    time.

    A completed job calls back to this thread to request that it be
    re-scheduled at some point in the future.
Example #4
0
from DAS.analytics.config import DASAnalyticsConfig
from DAS.analytics.scheduler import TaskScheduler
from DAS.analytics.web import AnalyticsWeb
from DAS.analytics.results import ResultManager
from DAS.analytics.utils import multilogging
import cherrypy
import re

import logging.handlers
import sys
import random

# Patterns for the "logfile_mode" option: a number followed by a unit
# letter — 'h'/'d' select timed log rotation, 'k'/'m' size rotation.
# FIX: the fractional part is now optional.  The original pattern,
# r"^(\d+(?:\.\d*))\s*([hd])", made "(?:\.\d*)" mandatory, so plain
# integer specs such as "12h" or "100k" never matched.
RE_TIMEROTATE = re.compile(r"^(\d+(?:\.\d*)?)\s*([hd])")
RE_SIZEROTATE = re.compile(r"^(\d+(?:\.\d*)?)\s*([km])")

# Logging options, registered on the shared config object at import
# time.  NOTE(review): ``basestring`` implies a Python 2 codebase.
DASAnalyticsConfig.add_option("verbose", group="Logging", type=bool, default=False, help="Verbose logging", short="v")
DASAnalyticsConfig.add_option("log_to_stdout", group="Logging", type=bool, default=False, help="Log to STDOUT")
DASAnalyticsConfig.add_option("log_to_stderr", group="Logging", type=bool, default=False, help="Log to STDERR")
DASAnalyticsConfig.add_option("log_to_file", group="Logging", type=bool, default=False, help="Log to file.")
DASAnalyticsConfig.add_option(
    "logfile", group="Logging", type=basestring, default="/tmp/das_analytics.log", help="Name for logfile."
)
# logfile_mode values are presumably parsed with RE_TIMEROTATE /
# RE_SIZEROTATE above; None means no rotation.
DASAnalyticsConfig.add_option(
    "logfile_mode",
    group="Logging",
    type=basestring,
    default=None,
    help="Mode for rotating logs, if any. \
Numbers postfixed 'h' or 'd' indicate timed rotation, \
'k' or 'm' size rotation.",
)
Example #5
0
File: web.py  Project: ktf/DAS
        "wrapper function"
        def inner(self, *args, **kwargs):
            "inner function"
            for arg in required:
                if not arg in kwargs:
                    return self.error("Required argument '%s' not given!" % arg)
            return func(self, *args, **kwargs)
        return inner
    return wrap

def json_requested():
    "Check JSON in cherrypy request headers"
    # Missing 'accept' header defaults to "", which contains no 'json',
    # matching the short-circuit False of the membership-test chain.
    accept = cherrypy.request.headers.get('accept', '')
    return 'json' in accept

# Web front-end options, registered on the shared config object at
# import time (``basestring`` => Python 2).
DASAnalyticsConfig.add_option("web_port", type=int, default=8213,
    help="Cherrypy serve port.")
DASAnalyticsConfig.add_option("web_base", type=basestring,
    default='/analytics',
    help="Base path for analytics web")
# External plotfairy service used to render graphs for the web UI.
DASAnalyticsConfig.add_option("plotfairy_base", type=basestring,
    default='/plotfairy',
    help="Base path to a plotfairy instance to make graphs.")

class AnalyticsWeb(DASWebManager):
    """
    Use the DASWebManager class as the root of our interfaces,
    replacing the default header and footer.
    """
    def __init__(self, config, scheduler, results):
        self.config = config
        self._scheduler = scheduler