'''
Created on 2016年10月13日

@author: huawei
'''

from celery.app.base import Celery
from celery.result import AsyncResult
from kombu.entity import Queue, Exchange
import time

# from test_nfs_task import TestNfsTask

# Broker URL with redacted credentials (asterisks are placeholders).
test ="amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
app = Celery('cabbage',broker=test)
app.config_from_object("cabbage.cabbage_celery.celeryconfig")
# import celeryconfig
# app.config_from_object(celeryconfig)


# Dump the effective configuration (fixed: Python 3 print function).
for k, v in app.conf.items():
    print(k, v)
# Route TestIcTask to a dedicated 'test2' queue; other routes left disabled.
app.conf.update(CELERY_ROUTES = {
                 'test_ic_task.TestIcTask': {'queue': 'test2', 'routing_key': 'test2'},
#                  'product_list_crawler.ProductListCrawlerTask': {'queue': 'celery', 'routing_key': 'celery'}
                  })
# taskId = "de1d0b16-57b1-4128-87bc-3697f78ab6dc"

state = app.events.State()
# Fixed: Celery.tasks is a registry mapping (a property), not a callable --
# calling it raised TypeError. Also converted to the print() function.
print(app.tasks)
from celery.utils.log import get_task_logger
from domainmodeller.termextraction.JavaServicesTermExtractor import JavaServicesTermExtractor
from celery.app.base import Celery

from domainmodeller import settings
import domainmodeller.celeryconfig

# Task-aware logger for this module.
logger = get_task_logger(__name__)

# Celery application configured from the project's celeryconfig module.
celery = Celery()
celery.config_from_object(domainmodeller.celeryconfig)


# Shared extractor instance; settings.JAVA_SERVICES is presumably the
# endpoint/config of the Java term-extraction service -- TODO confirm.
term_extractor = JavaServicesTermExtractor(settings.JAVA_SERVICES)
# Need a timeout because GATE can take a long time if given garbage text
@celery.task(time_limit=60)
def extract_terms(raw_text, domain_model):
    """Extract domain terms from *raw_text* using the shared Java service."""
    return term_extractor.extract_terms(raw_text, domain_model)
# Beispiel #3 (0)
from __future__ import absolute_import, unicode_literals

import os
from celery.app.base import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'project.settings')

# Redis on localhost serves as both the result backend and the broker.
app = Celery('project', backend='redis', broker='redis://localhost:6379')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()


@app.task(bind=True)
def debug_task(self):
    """Print this task's request context; handy for smoke-testing a worker."""
    message = 'Request: {0!r}'.format(self.request)
    print(message)
# Beispiel #4 (0)
from celery import shared_task
from celery.app.base import Celery
from customcrawler import celeryconfig
from customcrawler.retry_mechanism import retry_session
from customcrawler.models import URL_details
from sqlalchemy.orm import sessionmaker
from customcrawler.models import db_connect

# Celery app for the crawler; tasks travel over a local RabbitMQ broker.
app = Celery('customcrawler', broker='amqp://')
app.config_from_object(celeryconfig)

# HTTP session that retries failed requests up to 5 times.
session_retry = retry_session(retries=5)
# Minimal User-Agent so the checker service accepts our requests.
headers = {'User-Agent': 'Mozilla/5.0'}


class ProcessTask(object):
    """Fetch accessibility-check results for a URL from the EIII service.

    NOTE(review): the body of run() appears truncated in this snippet --
    `data`, `total_violations` and `total_verify` are computed/initialised
    but never used or persisted here.
    """

    def __init__(self):

        # Session factory bound to the project database engine.
        engine = db_connect()
        self.Session = sessionmaker(bind=engine)

    def run(self, base_url, job_data_id):

        # Query the EIII page-checker JSON-LD export endpoint for base_url.
        url = "http://axe.checkers.eiii.eu/export-jsonld/pagecheck2.0/?url=" + base_url

        r = session_retry.get(url=url, headers=headers)

        # Parsed JSON-LD payload from the checker service.
        data = r.json()

        # Accumulators for violation counts (presumably filled in by code
        # missing from this snippet -- TODO confirm against the original).
        total_violations = 0
        total_verify = 0
# Beispiel #5 (0)
from celery import platforms
# Prevent main celery proc from killing pre-forked procs,
# otherwise killing celery main proc causes sync main firex proc
# to hang since broker will remain up.
platforms.set_pdeathsig = lambda n: None

from celery.app.base import Celery
from firexkit.task import FireXTask
from celery.signals import celeryd_init
from firexapp.submit.install_configs import install_config_path_from_logs_dir, load_existing_install_configs

# Parent package of this module (drops the last dotted component).
firex_app_name = '.'.join(__name__.split(".")[:-1])
# All tasks registered on this app use FireXTask as their base class.
app = Celery(strict_typing=False,
             task_cls='%s:%s' % (FireXTask.__module__, FireXTask.__name__))
app.config_from_object(firex_app_name + '.default_celery_config')


@celeryd_init.connect
def add_items_to_conf(conf=None, **_kwargs):
    """Copy run metadata from the result backend into the worker's conf."""
    backend = app.backend
    # Values were placed in the backend by firexapp submit; they come back
    # as bytes, hence the decode().
    for key in ('uid', 'logs_dir', 'resources_dir'):
        setattr(conf, key, backend.get(key).decode())
    conf.link_for_logo = conf.logs_dir
    conf.logs_url = None

    install_config = install_config_path_from_logs_dir(conf.logs_dir)
    assert os.path.isfile(install_config), \
        f"Install config missing from run, firexapp submit is expected to have populated it: {install_config}"

    # TODO: assumes everywhere celery is started can load from logs_dir. Should likely serialize to backend.
# Beispiel #6 (0)
from celery.app.base import Celery
# from celery.contrib import rdb
from billiard.context import Process
from scrapy.utils.project import get_project_settings
from celery.utils.log import get_task_logger
from scrapy.crawler import CrawlerProcess
# import user spiders
#from Crawlers.spiders import


# Create celery app
# Redis db 0 on localhost acts as both broker and result backend.
celery_app = Celery('tasks',
                    broker='redis://localhost:6379/0',
                    backend='redis://localhost:6379/0')
# Extra settings come from the local celeryconfig module.
celery_app.config_from_object('celeryconfig')


class UrlCrawlerScript(Process):
    """Run a Scrapy spider inside a separate (billiard) process.

    Isolating the crawl in its own process lets it be launched from a
    Celery worker without interfering with the worker itself.
    """

    def __init__(self, spider):
        Process.__init__(self)
        project_settings = get_project_settings()
        self.crawler = CrawlerProcess(project_settings)
        self.spider = spider

    def run(self):
        # Executed in the child process: schedule the spider and block
        # until the crawl finishes.
        self.crawler.crawl(self.spider)
        self.crawler.start()
# Beispiel #7 (0)
from __future__ import absolute_import

import os

from django.conf import settings
from celery.app.base import Celery
import configurations

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app.settings.settings')
os.environ.setdefault('DJANGO_CONFIGURATION', 'Production')
# django-configurations must be initialised before Django settings are read.
configurations.setup()

app = Celery('app')
app.config_from_object('django.conf:settings')

# Discover task modules in every installed Django app.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)


@app.task(bind=True)
def debug_task(self):
    """Log the current request context, to verify the worker is wired up."""
    request_repr = 'Request: {0!r}'.format(self.request)
    print(request_repr)
# Beispiel #8 (0)
# http://stackoverflow.com/questions/24232744/scrapy-spider-not-following-links-when-using-celery

from celery.app import shared_task
from celery.app.base import Celery
from scrapy.crawler import Crawler
from scrapy.conf import settings
from scrapy import log, project, signals
from twisted.internet import reactor
from billiard import Process
from scrapy.utils.project import get_project_settings
from craigslist_sample.spiders.test2 import MySpider

from celery.utils.log import get_task_logger

# Celery app backed by a local RabbitMQ broker (guest access).
app = Celery('tasks', broker='amqp://guest@localhost//')
app.config_from_object('celeryconfig')

# Task-aware logger (adds task id/name to log records).
logger = get_task_logger(__name__)

class UrlCrawlerScript(Process):
    """Run a Scrapy spider in its own process using the legacy Crawler API.

    NOTE(review): Crawler(...).configure() is the pre-1.0 Scrapy interface;
    newer Scrapy versions use CrawlerProcess instead.
    """

    def __init__(self, spider):
        Process.__init__(self)
        project_settings = get_project_settings()
        self.crawler = Crawler(project_settings)
        self.crawler.configure()
        self.spider = spider

    def run(self):
        # Child-process entry point: start the crawl and the engine.
        self.crawler.crawl(self.spider)
        self.crawler.start()
# Beispiel #9 (0)
# -*- encoding: utf-8 -*-
'''
Created on 2016年9月5日

@author: huawei
'''
from celery.app.base import Celery
# from cabbage.test.celery_test import celeryconfig
from kombu.entity import Queue, Exchange


# Broker URL with redacted credentials (asterisks are placeholders).
test ="amqp://*****:*****@172.16.4.134:5672/cabbage_vhost"
#server="amqp://*****:*****@10.0.137.88:5672/cabbage_vhost"

# RPC result backend so task results come back over AMQP.
app = Celery('aaa',backend="rpc://",broker=test)
# celeryconfig.CELERY_QUEUES =  ( Queue('default', Exchange('default'), routing_key='default'),
#                    Queue('cabbage', Exchange('cabbage'), routing_key='cabbage'))
app.config_from_object('cabbage.test.celery_test.celeryconfig')
# Blocks here: starts a worker in the current process.
app.worker_main()
# Beispiel #10 (0)
import sys
import threading
import time
# if "/Users/hua/workspace/python/cabbage" not in sys.path:
#         sys.path.append("/Users/hua/workspace/python/cabbage")
#         sys.path.append("/Users/hua/workspace/python/cabbage/src")
        


# NOTE(review): this snippet appears truncated -- ConfigParser (Python 2
# module name), os, Celery, implementer, ITask and task_method are used
# below but not imported here.
cfg = ConfigParser.ConfigParser()
# Read the project config relative to the 'cabbage' checkout root.
cfg.read(os.getcwd().split("cabbage")[0]+'cabbage/cabbage.cfg')



app = Celery('cabbage',backend="amqp",broker='amqp://172.16.4.134')
app.config_from_object("cabbage.test.celery_test.celeryconfig")

@implementer(ITask)
class T:
    """Demo ITask implementation exposing two Celery task methods."""

    def __init__(self):
        pass

    # Fixed below: Python 2 print statements converted to the print()
    # function (identical output for a single string argument).
    @app.task(bind=True, filter=task_method, name="cabbage.test.test_celery.T.run")
    def run(self):
        print("121212")

    @app.task(bind=True, filter=task_method, name="cabbage.test.test_celery.T.run2")
    def run2(self, a, b):
        print("121212")
        return a * b