Example #1
def main(ctx, verbosity):
    configure_logger(verbosity)

    # Load the configurations from file
    config = Config(root_path=".")
    config.from_object(default_settings)
    config.from_envvar(ZUBBI_SETTINGS_ENV)

    # Validate the configuration
    tenant_sources_repo = config.get("TENANT_SOURCES_REPO")
    tenant_sources_file = config.get("TENANT_SOURCES_FILE")
    # Fail if both are set or neither is set
    if (
        (not tenant_sources_file and not tenant_sources_repo)
        or (tenant_sources_file and tenant_sources_repo)
    ):
        raise ScraperConfigurationError(
            "Either one of 'TENANT_SOURCES_REPO' "
            "and 'TENANT_SOURCES_FILE' must be set, "
            "but not both."
        )

    # Store the config in click's context object to be available for subcommands
    ctx.obj = {"config": config}

    if ctx.invoked_subcommand is None:
        ctx.invoke(scrape)
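The check above relies on Python's operator precedence (`and` binds more tightly than `or`). A fully spelled-out form of the same mutual-exclusion logic, shown only as an illustrative sketch reusing the names from the example:

# Illustrative only: equivalent, explicit form of the validation above.
both_set = bool(tenant_sources_repo) and bool(tenant_sources_file)
neither_set = not tenant_sources_repo and not tenant_sources_file
if both_set or neither_set:
    raise ScraperConfigurationError(
        "Either one of 'TENANT_SOURCES_REPO' and 'TENANT_SOURCES_FILE' "
        "must be set, but not both."
    )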
Example #2
def load_config():
    cfg = Config(dirname(dirname(__file__)))
    cfg.from_object("autobit.settings")
    if "AUTOBIT_SETTINGS" in os.environ:
        cfg.from_envvar("AUTOBIT_SETTINGS")

    if not exists(cfg['WATCH_DIR']):
        logger.info("Creating watch dir: {}".format(cfg['WATCH_DIR']))
        os.makedirs(cfg['WATCH_DIR'])

    return cfg
Example #3
def load(config_filename='settings.py'):
    """Create a Flask config that will be used to update the application
    config when it is created."""
    config = Config("pjuu")

    # Load the default settings
    config.from_pyfile(config_filename)

    # Load the setting from the Environment variable
    config.from_envvar('PJUU_SETTINGS', silent=True)

    return config
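As the docstring notes, the returned Config is meant to be merged into the Flask application's own config. A minimal usage sketch; the application factory shown here is an assumption, not part of the example:

from flask import Flask

def create_app():
    app = Flask(__name__)
    # Config is a dict subclass, so the loaded settings can be merged directly.
    app.config.update(load())
    return app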
Example #4
def make_config(app=None):
    if app is not None:
        cfg = app.config
    else:
        from flask.config import Config
        root_path = os.path.dirname(__file__).rsplit('/', 1)[0]
        cfg = Config(root_path)

    # customize config here
    cfg.from_object(default_config)
    cfg.from_pyfile('myapp.cfg', silent=True)
    cfg.from_envvar('MYAPP_CONFIG', silent=True)
    cfg['BABEL_DEFAULT_LOCALE'] = cfg['LANG']
    return cfg
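default_config is imported elsewhere in the project and is not shown here. Purely to illustrate the layering (object defaults, then an optional pyfile, then an env-pointed file, each overriding the previous), a hypothetical stand-in could look like:

# Hypothetical default_config; from_object() only picks up UPPERCASE attributes.
class default_config:
    LANG = "en"
    DEBUG = False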
Example #5
class IndexerDaemon(object):
    def __init__(self):
        self.config = Config(root_path="/")
        self.config.from_envvar('APP_CONFIG')

        self.redis = searcher.redis_connect(self.config)
        db.init_db(self.config['SQLALCHEMY_DATABASE_URI'])
        self.searcher = searcher.WhooshSearcher(
            index_dir=self.config['WHOOSH_INDEX_PATH'])
        self.running = False

    def _get_writer(self):
        return self.searcher.ix.writer()

    def main_loop(self):
        self.running = True
        while self.running:
            self.work_on_queue()

    def work_on_queue(self):
        """Queue operations (redis reliable queue pattern)

        Atomically pop an item from the work queue and put it on the
        processing queue; after processing the item is removed from
        the processing queue.

        The "processing" step fetch the specified bookmark from the SQL
        database and run the specified operation (index, update, delete);
        after completing the operation it updates the bookmark `indexed_on`
        field.
        """
        conn = self.redis
        msg = conn.brpoplpush(searcher.QUEUE_INDEX, searcher.QUEUE_WORK)
        log.debug("Processing new operation: {}".format(msg))

        try:
            payload = json.loads(msg)
            op, _id = payload
        except ValueError as ex:
            log.error("Invalid job in queue: {0}: {1}".format(msg, str(ex)))
        else:
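The snippet is cut off after the `else:` branch. The docstring describes Redis' reliable-queue pattern, where an item is acknowledged by removing it from the processing queue once the work succeeds; a hedged sketch of how that completion step might continue (the `process` helper is hypothetical):

            # Sketch only: run the requested operation, then acknowledge the
            # item by removing it from the processing queue (LREM), per the
            # reliable-queue pattern described in the docstring.
            self.process(op, _id)  # hypothetical helper, not in the original
            conn.lrem(searcher.QUEUE_WORK, 1, msg)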
Example #6
import os
import sys
import time
import logging

from flask.config import Config

import boto.sqs
from boto.sqs.message import RawMessage
from boto import exception

LOG = logging.getLogger('alerta.sqs')

config = Config('/')
config.from_pyfile('/etc/alertad.conf', silent=True)
config.from_envvar('ALERTA_SVR_CONF_FILE', silent=True)

DEFAULT_AWS_REGION = 'eu-west-1'
DEFAULT_AWS_SQS_QUEUE = 'alerts'

AWS_REGION = os.environ.get('AWS_REGION') or config.get(
    'AWS_REGION', DEFAULT_AWS_REGION)
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID') or config.get(
    'AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY') or config.get(
    'AWS_SECRET_ACCESS_KEY')
AWS_SQS_QUEUE = os.environ.get('AWS_SQS_QUEUE') or config.get(
    'AWS_SQS_QUEUE', DEFAULT_AWS_SQS_QUEUE)


class Worker(object):
Example #7
def make_config():
    cfg = Config('')
    cfg.from_object('datacat.settings.default')
    cfg.from_envvar('DATACAT_SETTINGS', silent=True)
    return cfg
Example #8
import os
import sys
import time
import logging

from flask.config import Config

import boto.sqs
from boto.sqs.message import RawMessage
from boto import exception

LOG = logging.getLogger('alerta.sqs')

config = Config('/')
config.from_pyfile('/etc/alertad.conf', silent=True)
config.from_envvar('ALERTA_SVR_CONF_FILE', silent=True)

DEFAULT_AWS_REGION = 'eu-west-1'
DEFAULT_AWS_SQS_QUEUE = 'alerts'

AWS_REGION = os.environ.get('AWS_REGION') or config.get('AWS_REGION', DEFAULT_AWS_REGION)
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID') or config.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY') or config.get('AWS_SECRET_ACCESS_KEY')
AWS_SQS_QUEUE = os.environ.get('AWS_SQS_QUEUE') or config.get('AWS_SQS_QUEUE', DEFAULT_AWS_SQS_QUEUE)


class Worker(object):

    def __init__(self):

        try:
Example #9
from flask.config import Config
from celery.utils.log import get_task_logger
from ckanpackager.tasks.url_package_task import UrlPackageTask
from ckanpackager.tasks.datastore_package_task import DatastorePackageTask
from ckanpackager.tasks.dwc_archive_package_task import DwcArchivePackageTask

config = Config(__file__)
config.from_object('ckanpackager.config_defaults')
config.from_envvar('CKANPACKAGER_CONFIG')

from celery import Celery

app = Celery('tasks', broker=config['CELERY_BROKER'])
app.conf.CELERY_DISABLE_RATE_LIMITS = True
app.conf.CELERY_ACCEPT_CONTENT = ['json']
app.conf.CELERY_TASK_SERIALIZER = 'json'
app.conf.CELERY_CREATE_MISSING_QUEUES = True
app.conf.CELERY_DEFAULT_QUEUE = 'slow'


@app.task
def run_task(task, request):
    """ Run/enqueue the given task for the given request
   
    Note that the request should be validated before
    this is called.
 
    @param task: Name of the task. One of package_url,
                 package_dwc_archive or package_datastore
    @param request: Dictionary containing the request
    """