Code Example #1
File: autocloud_job.py  Project: TridevGuha/autocloud
def main():
    jobqueue = Queue('jobqueue')
    jobqueue.connect()
    while True:
        task = jobqueue.wait()
        log.debug("%s", task.data)
        auto_job(task.data)
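
For context, the worker above blocks on jobqueue.wait() until something is pushed onto the same 'jobqueue' queue. A minimal producer sketch, assuming the retask Task wrapper and import paths shown elsewhere in this listing; the payload key below is purely illustrative, not autocloud's real job schema:

from retask.queue import Queue
from retask.task import Task

# Producer-side sketch (hypothetical payload): push one job for the worker.
jobqueue = Queue('jobqueue')
jobqueue.connect()
jobqueue.enqueue(Task({'image_url': 'https://example.org/image.qcow2'}))
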
Code Example #2
def main():
    jobqueue = Queue('jobqueue')
    jobqueue.connect()

    while True:
        task = jobqueue.wait()

        task_data = task.data
        pos, num_images = task_data['pos']

        compose_details = task_data['compose']

        if pos == 1:
            session = init_model()
            compose_id = compose_details['id']
            compose_obj = session.query(ComposeDetails).filter_by(
                compose_id=compose_id).first()

            compose_status = compose_obj.status.code

            # The check for the completed status 'c' is a failsafe; this
            # branch should never be hit. It avoids sending a duplicate
            # message to fedmsg.
            if compose_status in ('r', 'c'):
                log.info("Compose %s already running. Skipping sending to "
                         "fedmsg" % compose_id)
            else:
                compose_obj.status = u'r'
                session.commit()

                params = copy.deepcopy(compose_details)
                params.update({'status': 'running'})
                publish_to_fedmsg(topic='compose.running', **params)

        result, running_status = auto_job(task_data)
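
A hedged sketch of the producer side for the worker above, with the payload shape inferred from the consumer code: 'pos' unpacks to (position, total image count) and 'compose' carries at least an 'id'. All concrete values are illustrative placeholders, not real autocloud data:

from retask.queue import Queue
from retask.task import Task

jobqueue = Queue('jobqueue')
jobqueue.connect()

# Hypothetical compose record; the real compose_details dict carries more
# fields, which publish_to_fedmsg() forwards as keyword arguments.
compose = {'id': 'Fedora-Example-20990101.n.0'}

jobqueue.enqueue(Task({
    'pos': (1, 5),        # first of five images in this compose
    'compose': compose,
    # ... per-image fields consumed by auto_job() would go here ...
}))
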
Code Example #3
def main():
    jobqueue = Queue('jobqueue')
    jobqueue.connect()

    while True:
        task = jobqueue.wait()

        task_data = task.data
        pos, num_images = task_data['pos']

        compose_details = task_data['compose']

        if pos == 1:
            session = init_model()
            compose_id = compose_details['id']
            compose_obj = session.query(ComposeDetails).filter_by(
                compose_id=compose_id).first()
            compose_obj.status = u'r'
            session.commit()

            params = copy.deepcopy(compose_details)
            params.update({'status': 'running'})
            publish_to_fedmsg(topic='compose.running', **params)

        result, running_status = auto_job(task_data)
Code Example #4
File: base.py  Project: sayanchowdhury/bugyou_plugins
class BasePlugin(object):
    __metaclass__ = abc.ABCMeta

    def __init__(self, *args, **kwargs):
        self.config = load_config(PLUGINS_CONFIG_FILEPATH)
        self.active_services = get_active_services()
        self.services = []

    def initialize(self):
        self.init_retask_connection()
        self.load_services()
        self.init_worker()

    def init_retask_connection(self):
        """ Connect to the retask queue for the plugin """
        self.queue = Queue(self.plugin_name)
        conn = self.queue.connect()
        log.info("Initializing redis conection: %s" % self.plugin_name)
        if not conn:
            log.error("Could not connect to %s queue" % self.plugin_name)
            return False

    def consume(self):
        while True:
            task = self.queue.wait()
            if task:
                log.debug("Processing Message: %s" % task.data['msg']['body']['msg_id'])
                self.process(task.data['msg'])

    def init_worker(self):
        """ Create a process and start consuming the messages """
        process = multiprocessing.Process(target=self.consume)
        process.start()

    def load_services(self):
        """ Load the services for the plugin """
        services = self.config.get(self.plugin_name, 'services').split(',')
        log.info("Start loading services")
        for service in services:
            self.services.append(self.active_services[service].load())
        log.info("Complete loading services %s" % self.services)

    @abc.abstractmethod
    def process(self):
        """ Consumes the messages from retask """
        return

    @abc.abstractmethod
    def do_pagure(self):
        """ Override to do activity related to pagure """
        return
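
A concrete plugin would supply plugin_name and implement the abstract hooks; the class below is a purely hypothetical sketch of how BasePlugin is meant to be subclassed, not code from bugyou_plugins:

class PagurePlugin(BasePlugin):
    # plugin_name doubles as the retask queue name and the config section.
    plugin_name = 'pagure'

    def process(self, msg):
        # Invoked by consume() for every task pulled off the 'pagure' queue.
        log.debug("Handling message %s" % msg['body']['msg_id'])
        self.do_pagure()

    def do_pagure(self):
        # Pagure-specific activity would go here.
        return
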
Code Example #5
File: RedisQueue.py  Project: rossdylan/netcrawl
class RedisQueue(object):
    def __init__(self, host, name, port=6379, password=None):
        self.super_queue = Queue(
                name,
                {
                    'host': host,
                    'port': port,
                    'db': 0,
                    'password': password,
                })
        self.super_queue.connect()

    def get(self):
        return self.super_queue.wait()

    def put(self, data):
        self.super_queue.enqueue(Task(data))
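
A hypothetical usage sketch for the wrapper above; the host, queue name, and payload are placeholders. Note that put() wraps the payload in a Task while get() hands back the raw retask Task, so callers read it through .data:

rq = RedisQueue('localhost', 'crawl')          # placeholder host/queue name
rq.put({'url': 'http://example.org'})          # enqueued as Task(data)

task = rq.get()                                # blocks until a task arrives
print(task.data['url'])
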
Code Example #6
File: worker.py  Project: birajkarmakar/rcc
def main():
    queue = Queue('rcc')
    queue.connect()
    while True:
        task = queue.wait()
        name = task.data['filename']
        print "Received", name
        content = task.data['text']
        destdir = writesource(name, content)
        temp_path = os.path.join(destdir, name)
        x = os.path.join(destdir, 'test')
        out, err = system('gcc ' + temp_path + ' -o ' + x)
        if err:
            queue.send(task, err, 120)
        else:
            out1, err1 = system(x)
            if err1:
                queue.send(task, err1, 120)
            else:
                queue.send(task, out1, 120)
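
A hedged sketch of the submitting side for the rcc worker above. The 'filename' and 'text' keys mirror what the worker reads; treating the value returned by enqueue() as a Job whose result carries the string passed to queue.send() is an assumption about the retask API:

import time
from retask.queue import Queue
from retask.task import Task

queue = Queue('rcc')
queue.connect()

# Hypothetical source file; the worker compiles it with gcc and sends back
# either the compiler error or the program's output.
source = 'int main(void) { return 0; }\n'
job = queue.enqueue(Task({'filename': 'hello.c', 'text': source}))

while job.result is None:      # poll until the worker calls queue.send()
    time.sleep(1)
print(job.result)
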
Code Example #7
File: puluworker.py  Project: kushaldas/pulu
def main():
    q = Queue('puluupdates')
    q.connect()
    while True:
        task = q.wait()
        data = task.data
        user = data['repository']['owner']['name']
        if user not in ['kushaldas']:
            return
        reponame = data['repository']['name']
        names = set()
        # Now go through all commits and find the unique directory names
        for commit in data['commits']:
            for fpath in commit['added']:
                names.add(fpath.split('/')[0])
            for fpath in commit['modified']:
                names.add(fpath.split('/')[0])

        # Now for each name, update the blog posts
        for name in names:
            if os.path.isdir(os.path.join('gitsources', user, name)):
                blog_post(user, name,
                          os.path.join('gitsources', user, name), data['commits'])
        reload_blog()
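
A hedged sketch of the payload this worker expects, inferred from the keys it reads (a GitHub-style push payload: repository owner and name plus per-commit added and modified paths). Values are illustrative placeholders, not real pulu data:

from retask.queue import Queue
from retask.task import Task

payload = {
    'repository': {
        'name': 'myblog',                   # hypothetical repository name
        'owner': {'name': 'kushaldas'},     # the only accepted owner
    },
    'commits': [{
        'added': ['posts/new-entry.md'],        # first path segment becomes
        'modified': ['posts/old-entry.md'],     # the directory to rebuild
    }],
}

q = Queue('puluupdates')
q.connect()
q.enqueue(Task(payload))
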
Code Example #8
File: async_worker.py  Project: d1ffuz0r/retask
from retask.queue import Queue
import time
queue = Queue('example')
queue.connect()
task = queue.wait()
print task.data
time.sleep(15)
queue.send(task, "We received your information dear %s" % task.data['user'])