Example #1
def worker(args):
    env = os.environ.copy()
    env["AIRFLOW_HOME"] = settings.AIRFLOW_HOME

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker

    worker = worker.worker(app=celery_app)
    options = {"optimization": "fair", "O": "fair", "queues": args.queues, "concurrency": args.concurrency}

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("worker", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, "w+")
        stderr = open(stderr, "w+")

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1), files_preserve=[handle], stdout=stdout, stderr=stderr
        )
        with ctx:
            sp = subprocess.Popen(["airflow", "serve_logs"], env=env)
            worker.run(**options)
            sp.kill()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        sp = subprocess.Popen(["airflow", "serve_logs"], env=env)

        worker.run(**options)
        sp.kill()
Example #2
def celery():
    """
    Run celery worker.
    """
    from project.extensions import celery
    from celery.bin import worker
    worker = worker.worker(app=celery)
    worker.run()
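
Note: the celery.bin.worker API used throughout these examples belongs to Celery 4.x and earlier; Celery 5 replaced it with a Click-based CLI, so worker.worker(app=...) no longer exists there. A minimal sketch of the programmatic equivalent on Celery >= 5, assuming a hypothetical broker URL:

# Celery >= 5 sketch: start an in-process worker via Celery.worker_main.
from celery import Celery

celery = Celery("project", broker="redis://localhost:6379/0")  # hypothetical broker URL

if __name__ == "__main__":
    # argv mirrors the `celery worker` command line; roughly equivalent
    # to worker.run() in the example above.
    celery.worker_main(argv=["worker", "--loglevel=INFO"])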
Example #3
    def run(self):  # pylint: disable=E0202
        import logging

        from flask import current_app
        from celery.bin import worker
        from async.celery_helpers import CeleryFactory
        celery = CeleryFactory(current_app).celery
        worker = worker.worker(app=celery)

        worker.run(loglevel=logging.INFO, state_db="async/celery_state", autoreload=True)
Example #4
def start_celery_worker():
    from celery import current_app
    from celery.bin import worker

    celery_app = current_app._get_current_object()
    worker = worker.worker(app=celery_app)
    options = {
        'broker': app.config['CELERY_BROKER_URL'],
        'loglevel': 'INFO',
        'traceback': True
    }
    worker.run(**options)
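
The options dict above is passed straight through as keyword arguments of worker.run(). An alternative that skips the CLI wrapper entirely is the app's own Worker class; a sketch under the same current_app setup, where loglevel and concurrency are assumed keyword arguments mirroring the options used elsewhere in these examples:

# Sketch using app.Worker directly instead of celery.bin.worker (Celery 4.x).
from celery import current_app

celery_app = current_app._get_current_object()
w = celery_app.Worker(loglevel="INFO", concurrency=2)
w.start()  # blocks until the worker shuts down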
Example #5
def worker(args):
    # Worker to serve static log files through this simple flask app
    env = os.environ.copy()
    env["AIRFLOW_HOME"] = settings.AIRFLOW_HOME
    sp = subprocess.Popen(["airflow", "serve_logs"], env=env)

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker

    worker = worker.worker(app=celery_app)
    options = {"optimization": "fair", "O": "fair", "queues": args.queues, "concurrency": args.concurrency}
    worker.run(**options)
    sp.kill()
Example #6
def worker(args):
    # Worker to serve static log files through this simple flask app
    env = os.environ.copy()
    env['AIRFLOW_HOME'] = settings.AIRFLOW_HOME
    sp = subprocess.Popen(['airflow', 'serve_logs'], env=env)

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker

    worker = worker.worker(app=celery_app)
    options = {
        'optimization': 'fair',
        'O': 'fair',
        'queues': args.queues,
        'concurrency': args.concurrency,
    }
    worker.run(**options)
    sp.kill()
Example #7
def worker(args):
    env = os.environ.copy()
    env['AIRFLOW_HOME'] = settings.AIRFLOW_HOME

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker

    worker = worker.worker(app=celery_app)
    options = {
        'optimization': 'fair',
        'O': 'fair',
        'queues': args.queues,
        'concurrency': args.concurrency,
    }

    if not args.foreground:
        pid, stdout, stderr, log_file = setup_locations("worker", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            sp = subprocess.Popen(['airflow', 'serve_logs'], env=env)
            worker.run(**options)
            sp.kill()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        sp = subprocess.Popen(['airflow', 'serve_logs'], env=env)

        worker.run(**options)
        sp.kill()
Example #8
                              meta={'msg': 'Uploading all files to aws...'})
            item = store.save(item, data)
            item['paid'] = 2
            self.update_state(state='PROGRESS',
                              meta={'msg': 'Saving into database...'})
            g = sync(item)
            if g:
                g.join()
                store.redis.hset('app_record', appid, item['version_code'])
        else:
            self.update_state(state='PROGRESS',
                              meta={'msg': 'This app has been up to date...'})
    except socket.error as e:
        self.update_state(state='PROGRESS',
                          meta={'msg': 'Have some error happened...'})
        self.retry(exc=e)

    return item['appid']


if __name__ == "__main__":
    from celery.bin import worker

    worker = worker.worker(app=c)
    options = {
        'concurrency': 4,
        'loglevel': 'INFO',
        'traceback': True,
    }
    worker.run(**options)
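
The task body at the top of this example is truncated, but it is evidently a bound task: it calls self.update_state and self.retry. For context, a minimal sketch of such a bound task with hypothetical names; the app instance c matches the one handed to worker.worker above:

import socket

from celery import Celery

c = Celery("tasks", broker="redis://localhost:6379/0")  # hypothetical broker URL

@c.task(bind=True, max_retries=3)
def update_app(self, appid):
    # bind=True exposes the task instance as `self`, enabling
    # update_state() and retry().
    try:
        self.update_state(state="PROGRESS", meta={"msg": "Working..."})
        # ... fetch, upload, and save the app here ...
    except socket.error as e:
        self.retry(exc=e)
    return appid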
Example #9
    def handle(self, *args, **options):
        worker.run(*args, **options)
Example #10
    def handle(self, *args, **options):
        worker.check_args(args)
        worker.run(**options)
Example #11
#!/usr/bin/env python
from celery import current_app
from celery.bin import worker
from run import app

from settings import *
application = current_app._get_current_object()
application.config_from_object('settings')

worker = worker.worker(app=application)
options = {
    'broker': CELERY_BROKER_URL,
    'loglevel': 'INFO',
    'traceback': True,
}
worker.run(**options)
Example #12
                           translation=os.path.join(dali_dir,
                                                    transform_basename),
                           transformed=os.path.join(dali_dir,
                                                    translated_basename),
                           pdb1=pdb1,
                           pdb2=pdb2)
    os.symlink(pdb1, os.path.join(dali_dir, "{0}.pdb".format(pdbid1)))
    os.symlink(pdb2, os.path.join(dali_dir, "{0}.pdb".format(pdbid2)))
    phi = get_electrostatics_grid(pdbid1, chain=chain1, alignment_id=dali_dir)
    get_vanderderwaals_grids(pdbid1,
                             chain=chain1,
                             alignment_id=dali_dir,
                             box=phi,
                             scale=1 / phi.scale)
    phi = get_electrostatics_grid(pdbid2, chain=chain2, alignment_id=dali_dir)
    get_vanderderwaals_grids(pdbid2,
                             chain=chain2,
                             alignment_id=dali_dir,
                             box=phi,
                             scale=1 / phi.scale)

    with open(progress_file, 'w') as f:
        print("done", file=f)
    print("Finished.")


if __name__ == '__main__':
    app = current_app._get_current_object()
    worker = worker.worker(app=app)
    worker.run(**queue.conf)
Example #13
def worker(args):
    """Starts Airflow Celery worker"""
    env = os.environ.copy()
    env['AIRFLOW_HOME'] = settings.AIRFLOW_HOME

    if not settings.validate_session():
        print("Worker exiting... database connection precheck failed! ")
        sys.exit(1)

    # Celery worker
    from airflow.executors.celery_executor import app as celery_app
    from celery.bin import worker  # pylint: disable=redefined-outer-name

    autoscale = args.autoscale
    if autoscale is None and conf.has_option("celery", "worker_autoscale"):
        autoscale = conf.get("celery", "worker_autoscale")
    worker = worker.worker(app=celery_app)  # pylint: disable=redefined-outer-name
    options = {
        'optimization': 'fair',
        'O': 'fair',
        'queues': args.queues,
        'concurrency': args.concurrency,
        'autoscale': autoscale,
        'hostname': args.celery_hostname,
        'loglevel': conf.get('core', 'LOGGING_LEVEL'),
    }

    if conf.has_option("celery", "pool"):
        options["pool"] = conf.get("celery", "pool")

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations(
            "worker", args.pid, args.stdout, args.stderr, args.log_file)
        handle = setup_logging(log_file)
        stdout = open(stdout, 'w+')
        stderr = open(stderr, 'w+')

        ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pid, -1),
            files_preserve=[handle],
            stdout=stdout,
            stderr=stderr,
        )
        with ctx:
            sub_proc = subprocess.Popen(['airflow', 'serve_logs'],
                                        env=env,
                                        close_fds=True)
            worker.run(**options)
            sub_proc.kill()

        stdout.close()
        stderr.close()
    else:
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)

        sub_proc = subprocess.Popen(['airflow', 'serve_logs'],
                                    env=env,
                                    close_fds=True)

        worker.run(**options)
        sub_proc.kill()
Example #14
    def handle(self, *args, **options):
        worker.run()
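
Examples #9, #10 and #14 are fragments of Django management commands that delegate to a module-level worker instance. A fuller sketch of that pattern, with hypothetical module paths:

# Hypothetical complete management command (Celery 4.x API, as in the examples above).
from django.core.management.base import BaseCommand

from celery.bin import worker as worker_bin
from myproject.celery import app  # hypothetical location of the Celery app

worker = worker_bin.worker(app=app)


class Command(BaseCommand):
    help = "Run a Celery worker in-process."

    def handle(self, *args, **options):
        worker.run(loglevel="INFO")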