Example #1
def check_recent_users():
    """ Routine check on recently online users """
    from datetime import timedelta

    from django.utils import timezone
    from scoop.user.models import User
    from scoop.rogue.models import IPBlock
    users = User.objects.by_online_range(timezone.now(), timedelta(days=-90))
    # Send the "IP blocked" signal for the affected users (see rogue.listeners.user)
    for user in users:
        IPBlock.objects.get_user_status(user)
Example #2
from datetime import timedelta

from celery.schedules import crontab


class Config:
    DEBUG = True
    # Celery configuration
    CELERY_BROKER_URL = 'redis://localhost:6379/2'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/3'
    CELERY_ALWAYS_EAGER = True
    # Register tasks
    CELERY_IMPORTS = ("app.tasks",)
    # Schedules for data fetching and analysis
    CELERYBEAT_SCHEDULE = {
        'job1': {
            'task': 'app.tasks.tk.job1',
            'schedule': crontab(minute=0, hour=11),
            'args': (),
        },
        'job2': {
            'task': 'app.tasks.job2',
            'schedule': timedelta(seconds=10),
            'args': (),
        },
    }
    # Celery timezone
    CELERY_TIMEZONE = 'Asia/Shanghai'


    @staticmethod
    def init_app(app):
        pass
Example #3
from datetime import timedelta


class Config:
    DEBUG = True
    SECRET_KEY = 'hard to guess string'
    # RESTFUL SETTING
    ERROR_404_HELP = False
    # SQLALCHEMY SETTING
    SQLALCHEMY_COMMIT_ON_TEARDOWN = True
    # SQLALCHEMY_ECHO = True
    SQLALCHEMY_TRACK_MODIFICATIONS = False

    # CELERY SETTING
    CELERYD_TASK_SOFT_TIME_LIMIT = 2000
    CELERY_DISABLE_RATE_LIMITS = True
    CELERYD_FORCE_EXECV = True
    CELERYD_MAX_TASKS_PER_CHILD = 30
    CELERY_TIMEZONE = 'Asia/Shanghai'
    CELERY_BROKER_URL = 'redis://localhost:6379/10'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/11'

    CELERYBEAT_SCHEDULE = {
        # use to auto inspect
        # 't1': {
        #     'task': 'add_together',
        #     'schedule': timedelta(seconds=3)
        # },
        'start_test': {
            'task': 'start_test',
            # 'schedule': crontab(minute='*/1'),
            'schedule': timedelta(seconds=3),
            "args": (),
        },
    }
Example #4
import os
from datetime import timedelta


class Config(object):
    DEBUG = False
    TEST = False

    SQLALCHEMY_TRACK_MODIFICATIONS = True
    DATABASE_USER = os.getenv("DATABASE_USER")
    DATABASE_PASS = os.getenv("DATABASE_PASS")
    DATABASE_URI = os.getenv("DATABASE_URI")
    DATABASE_PORT = os.getenv("DATABASE_PORT")
    DATABASE_DB = os.getenv("DATABASE_DB")
    SQLALCHEMY_DATABASE_URI = "mysql+pymysql://{USER}:{PASS}@{URI}:{PORT}/{DBNAME}?charset=utf8".format(
        USER=DATABASE_USER,
        PASS=DATABASE_PASS,
        URI=DATABASE_URI,
        PORT=DATABASE_PORT,
        DBNAME=DATABASE_DB,
    )
    CELERY_BROKER_URL = "redis://127.0.0.1:6379/1"
    CELERY_RESULT_BACKEND = "redis://127.0.0.1:6379/2"
    CELERY_TASK_SERIALIZER = "json"
    CELERY_TASK_RESULT_EXPIRES = 3600
    CELERY_ACCEPT_CONTENT = ['json']
    CELERY_RESULT_SERIALIZER = 'json'
    CELERYBEAT_SCHEDULE = {
        'get_book_info': {
            'task': 'get_book_info',
            'schedule': timedelta(seconds=600)
            # 'schedule': crontab(hour=1)
        },
    }
    CELERY_TIMEZONE = 'UTC'
Example #5
def check_random_nonrecent_users():
    """ Routine check on 500 randomly chosen non-recent users """
    from datetime import timedelta

    from django.utils import timezone
    from scoop.user.models import User
    from scoop.rogue.models import IPBlock
    users = User.objects.filter(last_online__lt=timezone.now() -
                                timedelta(days=89)).order_by('?')[0:500]
    # Send the "IP blocked" signal for the affected users (see rogue.listeners.user)
    for user in users:
        IPBlock.objects.get_user_status(user)
Example #6
def expire(self, order_id):
    try:
        order = Order.objects.get(id=order_id)
        order.queue_id = self.request.id
        print(f"queue_id: {order.queue_id}")
        order.save()
        expire_async.apply_async(args=(order.id, ),
                                 eta=datetime.utcnow() + timedelta(days=3),
                                 task_id=self.request.id)
    except Exception:
        print(f"Failed retrieving order object of id {order_id}")
        return
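
For context, a minimal sketch of the companion task scheduled above; the body is inferred from the call site (expire_async receiving an order id), so the model import path and the field it sets are assumptions, not a confirmed implementation:

from celery import shared_task


@shared_task
def expire_async(order_id):
    # Hypothetical body: mark the order expired once the 3-day eta elapses.
    from orders.models import Order  # assumed import path
    order = Order.objects.get(id=order_id)
    order.expired = True  # assumed field
    order.save()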
Example #7
from datetime import timedelta

from celery.schedules import schedule
from django.shortcuts import render_to_response
from djcelery.models import IntervalSchedule, PeriodicTask
from djcelery.schedulers import ModelEntry


def addTask(request):
    intervalSchedule = IntervalSchedule.from_schedule(schedule(timedelta(seconds=10)))
    intervalSchedule.save()
    modelData = dict(
        name="dcTestPersist",
        task="technologytrackerapi.tasks.createRecord",
        interval_id=intervalSchedule.pk,
    )
    periodicTask = PeriodicTask(**modelData)
    periodicTask.save()
    # return periodicTask
    # n = periodicTask.name
    me = ModelEntry(periodicTask)
    try:
        me.save()
    except Exception:
        # Dump the SQL queries that ran before the failure, then re-raise.
        from django.db import connection
        print(connection.queries)
        raise
    # return me
    return render_to_response('taskView.html')
Example #8
from datetime import timedelta


class Config:
    DEBUG = True
    # Celery configuration
    CELERY_BROKER_URL = 'redis://192.168.0.203:6379/2'  # Redis server
    CELERY_RESULT_BACKEND = 'redis://192.168.0.203:6379/2'  # Redis server
    CELERY_RESULT_SERIALIZER = 'json'
    CELERY_TASK_SERIALIZER = 'json'
    CELERY_ACCEPT_CONTENT = ['application/json']
    # Schedules for data fetching and analysis
    CELERYBEAT_SCHEDULE = {
        'job1': {
            'task': 'app.tasks.job1',
            'schedule': timedelta(seconds=10),
            'args': (),
        }
    }
    # Celery timezone
    CELERY_TIMEZONE = 'Asia/Shanghai'

    @staticmethod
    def init_app(app):
        pass
Example #9
import djcelery  # import the django-celery module

djcelery.setup_loader()  # load celery task modules
BROKER_URL = "redis://127.0.0.1:6379/1"  # broker address: Redis database
CELERY_IMPORTS = ('CeleryTask.tasks',)  # task module(s)
CELERY_TIMEZONE = 'Asia/Shanghai'
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"  # celery beat scheduler (fixed value)

from celery.schedules import crontab
from celery.schedules import timedelta

CELERYBEAT_SCHEDULE = {
    u"test task 1": {
        "task": "CeleryTask.tasks.sendDing",  # task function
        "schedule": timedelta(seconds=5),  # run interval
        # "args": ()
    }
}

# crontab examples:
# crontab(hour="*/2")                                # every 2 hours
# crontab(minute=0, hour="*/2")                      # at minute 0 of every 2nd hour
# crontab(minute=0, hour="*/2,8-12")                 # every 2 hours, and during hours 8-12
# crontab(minute=0, hour=0, day_of_month="2-31/2")   # on even-numbered days
# crontab(0, 0, day_of_month="1", month_of_year="5") # every May 1st
#
# timedelta example:
# timedelta(seconds=1)  # run every second
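
# A hedged illustration (the entry name and timing below are illustrative,
# not from the source) of wiring one of the crontab forms above into the schedule:
CELERYBEAT_SCHEDULE[u"nightly ding"] = {
    "task": "CeleryTask.tasks.sendDing",
    "schedule": crontab(minute=0, hour=0),  # every day at midnight
}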

ERROR_PATH = os.path.join(BASE_DIR, "error.log")
Example #10
        "exchange": "default",
        "exchange_type": "direct",
        "routing_key": "default"
    },
    "topicqueue": {  # 这是一个topic队列 凡是topictest开头的routing key都会被放到这个队列
        "routing_key": "topic.#",
        "exchange": "topic_exchange",
        "exchange_type": "topic",
    },
    "task_eeg": {  # 设置扇形交换机
        "exchange": "tasks",
        "exchange_type": "fanout",
        "binding_key": "tasks",
    },
}
from celery.schedules import crontab
from celery.schedules import timedelta

CELERYBEAT_SCHEDULE = {  # beat schedule
    # scheduled task 1: run every 30 s
    u'test timer 1': {
        "task": "sbcelery.tasks.tsend_email",
        # "schedule": crontab(minute='*/2'),  # or 'schedule': timedelta(seconds=3),
        "schedule": timedelta(seconds=30),
        "args": (),
    },
}

#############################
# celery configuration end
#############################
Example #11
# Public key
alipay_public_key_string = '''-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoKgRki1TWBrFIEbZ486mrwE6qj0yQa2I5ybPIp618JHXq1XpRSUyoFjW7xEGgvEI2zSA6+qk927Cp2KZddRHMAGOowmjNswhdT1nIcK9MCtX0GRE5E4JOLUzD0Ir0SBihQyrdMxZxmwhDs4o2HJdyC5CkSYeGuMH4CxmcM7CBU5mgwHLXmvjWldT/QTsvos0eGELZRq7o/GYAo/S7N3Qb4z4FPwdTEzyqMYFH+2s+Hkpg3QApPGzti8lD/MfjaykU09wXzAIuYcQ8/VvAzPGRC6FTbFqaEd2R+UfRZ8mxBPlwSl5awpg0uGzTp28INAUnYlA1OvabazqDkG3KOo4GQIDAQAB
-----END PUBLIC KEY-----'''
# Private key
alipay_private_key_string = '''-----BEGIN PRIVATE KEY-----
MIIEogIBAAKCAQEAoKgRki1TWBrFIEbZ486mrwE6qj0yQa2I5ybPIp618JHXq1XpRSUyoFjW7xEGgvEI2zSA6+qk927Cp2KZddRHMAGOowmjNswhdT1nIcK9MCtX0GRE5E4JOLUzD0Ir0SBihQyrdMxZxmwhDs4o2HJdyC5CkSYeGuMH4CxmcM7CBU5mgwHLXmvjWldT/QTsvos0eGELZRq7o/GYAo/S7N3Qb4z4FPwdTEzyqMYFH+2s+Hkpg3QApPGzti8lD/MfjaykU09wXzAIuYcQ8/VvAzPGRC6FTbFqaEd2R+UfRZ8mxBPlwSl5awpg0uGzTp28INAUnYlA1OvabazqDkG3KOo4GQIDAQABAoIBABXkENDcQDkHHMkzHkl+RRQflMDRqeFtJfRpQ1wySBRJqxt7j1eOpAFZWaAlr79z3IMR+mcrB+N3QirQspxtmm2eKLNqgsTat8xj24OsJ19C6KpKn2CEiZkih5ySpanPQd1jRpGZrrnszexYhxRHMSQvuX5RtVRwjwgqxKKmaaWTO/ToS5KuRLR1TuYVK/phnkEtj+5lKMXSYhIPj9PQr1puEDWJDIrFFjgeyZqjwH+gHywve8GqiM+xYOgY7A9ACmTLFESEP0WmbZF1e8KzP+7JQZdEI/T19kxaBVXbcwLZ7+RO0eWzzRa877ZWo2TNIn+ZHzJL4FLhuMwCRUn+m6ECgYEA0WWW3Sk3nvC7EF2PC6KsAKK6ZeqGKRahbwxuL7zLvzi7wHNkhwNgWKsmaotrkOAO924AekLb4Hi1/xpf/6oTFOAEGXV8lQqORhYd+7zWXTsBdIlBhzbQmWJ86W3KeE9wfi5hsBbaOn7t9hnBmPA0Q4JdoOthOZAEUubBSXbB130CgYEAxGl+aRsQZKvU7ey+7BKYYDBcIvxpmF89eNDuf3xtBiyDgqL9S/vvJzqn+4y6qqXI49AdWjPOxgvuNXRnfP7yGHTqLxOnwvsBpPizlMDNmK4YIKJUnk1yLsLh4kCdyYJQa3cROfoQ/oWpIjGjnO2DAstfB/3dWEIbnQKa0pj9nc0CgYBm5eId6hxACZITN7aMhDK72Tt5y6aD2HAaDuSyprcEz89Lgij7Q8h7qhclsj94oPIZ2r93VRWRmB/vLTnRe/UGhBLfo+FFqDtD66huRVyd0dokNzKxTFlzlndFikM9nePszcQJcSFqL2emUP/WtOsp5Y02/5P2YnQNfQGspSQaTQKBgBl9/7QwmQ4X50kCIz2MpE5HuI2p6SKnqdjWdT4CrjvQ5zi7YtjL2BxlVowllcy0O8ClsEmW082MmtxBQXVNuapG8mYtzOZXob0Bsn0qEQUyA1uo9gad0qYTETJGZLRUv49TIt89f5spSexwOOYTRZ/FOY5V+raLzf2w6ttOlAw5AoGAKM3/EwyI6oK74v36Hhdxk+dWeMLVrhCKISj9XRKGXR7FfOQHpgBdOvr6r5AD0qtxDr1+GxRuom9VWH2Bkv1NXrP40e9cbRMg9c/eCiasEc7mWsExdVDboaRgla0npcGFRyk3EcFq+gltl8H6JLrI9xR2gBgnTPhQkhbp3Lt2JFk=
-----END PRIVATE KEY-----'''

# Import the djcelery package
import djcelery
# Load task modules
djcelery.setup_loader()
# Broker: Redis, database 1
BROKER_URL = 'redis://127.0.0.1:6379/1'
# Task module(s)
CELERY_IMPORTS = ('CeleryTask.tasks',)
# Celery timezone, kept consistent with Django
CELERY_TIMEZONE = 'UTC'
# django-celery scheduler (fixed value)
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'

# Scheduled tasks
from celery.schedules import timedelta
CELERYBEAT_SCHEDULE = {
    u'test task': {
        'task': 'CeleryTask.tasks.test',    # task function to run
        'schedule': timedelta(seconds=1)    # run once per second
    }
}
Example #12
# coding: utf-8
from celery.schedules import crontab, timedelta
from celery.task import periodic_task
from django.utils.translation import ugettext_lazy as _
from scoop.location.models import City, Currency


@periodic_task(run_every=crontab(hour=0, minute=1), options={'expires': 3600})
def update_currency_balances():
    """ Update currency exchange values """
    return Currency.objects.update_balances()


@periodic_task(run_every=timedelta(minutes=30),
               rate_limit='3/m',
               options={'expires': 3600})  # 30 minutes = 48 calls/day
def auto_fetch_city_pictures():
    """ Progressively add pictures to cities of 15,000+ inhabitants """
    cities = City.objects.by_population(['FR'], 15000).filter(
        city=True, pictured=False).order_by('-population')
    fetch_count = 0
    if cities.exists():
        for city in cities[0:2]:  # max 2 requests per call: 96 requests/day
            if not city.has_pictures({}):  # no filter on moderated pictures
                fetched = city.fetch_picture()
                fetch_count += fetched
                print(_("Successfully fetched {count} images for city {city}")
                      .format(count=fetched, city=city))
    return {'fetched': fetch_count}
Example #13
# coding: utf-8
from celery.schedules import crontab, timedelta
from celery.task import periodic_task
from django.conf import settings
from django.db import transaction
from django.utils import timezone
from easy_thumbnails.files import generate_all_aliases
from scoop.content.models.picture import Picture
from scoop.core.util.django.sitemaps import ping_feed


@periodic_task(run_every=timedelta(hours=48), options={'expires': 3600})
def clean_transient_pictures():
    """ Supprimer les images volatiles de plus de 24 heures """
    limit = timezone.now() - timedelta(hours=2)
    pictures = Picture.objects.filter(transient=True, updated__lt=limit)
    for picture in pictures:
        picture.delete(clear=True)


@periodic_task(run_every=timedelta(hours=4), options={'expires': 3600})
def update_unsized_pictures():
    """ Mettre à jour les dimensions des images sans dimensions """
    with transaction.atomic():
        pictures = Picture.objects.filter(width__in=[0, 1],
                                          height__in=[0,
                                                      1]).order_by('?')[:256]
        for picture in pictures:
            picture.update_size()
    return True
Example #14
#############################
import djcelery

djcelery.setup_loader()
BROKER_URL = 'redis://:[email protected]:6379/1'
CELERY_IMPORTS = ('vei.tasks',)
CELERY_TIMEZONE = 'Asia/Shanghai'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
from celery.schedules import crontab
from celery.schedules import timedelta

CELERYBEAT_SCHEDULE = {  # beat schedule
    u'fetch proxies': {
        "task": "vei.tasks.getproxy",
        "schedule": timedelta(minutes=10),  # every 10 minutes
        "args": (),
    },
    u'test proxies': {
        "task": "vei.tasks.proxytest",
        "schedule": timedelta(minutes=20),  # every 20 minutes
        "args": (),
    },
}
}
#############################
# celery configuration end
#############################

# Email settings
FROM_ADDR = "******"  # email account
EMAIL_PASSWORD = "******"  # email password
Example #15
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/

STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'assets', 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'assets', 'media')

# Celery
from celery.schedules import timedelta, crontab
BROKER_URL = 'django://'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERYBEAT_SCHEDULE = {
    'check-expired-job': {
        'task': 'hub.tasks.task_periodic_check_jobtime',
        'schedule': timedelta(seconds=3600),  # run every hour
    },
    # 'tweet-todays-job': {
    #     'task': 'hub.tasks.task_tweet_todays_job',
    #     # 'schedule': timedelta(seconds=10),
    #     'schedule': crontab(minute=0, hour='19'), # run at 7 pm
    # },
}

# Site Domain
SITE_DOMAIN = 'http://localhost:8000'

# Login URL
LOGIN_URL = '/login/'

# Login Radius settings
Example #16
CACHE_MIDDLEWARE_ALIAS = 'default'

# DATABASE_ROUTERS = ['mydbrouter.Router',]

## Public key
alipay_public_key_string = """-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtexG8P2qyi04DUrUEQnbpfCYS7im27E0q55gsDiE+g/JdWwJJ3/I2PRPbyz38ah2mv66GFdE4j2nXarL3jSgoP995mZzmllGLDSOWbFI2QB+7ZMikVFXzjWZ68FcQMEJExuC7ikD4vuY8J4Wt00NerbSwKCWL4CjvV3CxcBaJXPn3kIOzItX0yDToK9rvZ9E4P1PIURe96Q/SNz+GkJaR32PQJotUOrsg0gVdWhSBIjRuSpvUpUlPEZrzprhyYn/zZ6TktKaURzfVqSpRa0h+Z9y03sIIDVLSWRlaDKw81WsVKfJlUcN9WA/uypMOOQb6MXolZEYMRm+EDgEFT0oUQIDAQAB
-----END PUBLIC KEY-----"""
## Private key
alipay_private_key_string = """-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAtexG8P2qyi04DUrUEQnbpfCYS7im27E0q55gsDiE+g/JdWwJJ3/I2PRPbyz38ah2mv66GFdE4j2nXarL3jSgoP995mZzmllGLDSOWbFI2QB+7ZMikVFXzjWZ68FcQMEJExuC7ikD4vuY8J4Wt00NerbSwKCWL4CjvV3CxcBaJXPn3kIOzItX0yDToK9rvZ9E4P1PIURe96Q/SNz+GkJaR32PQJotUOrsg0gVdWhSBIjRuSpvUpUlPEZrzprhyYn/zZ6TktKaURzfVqSpRa0h+Z9y03sIIDVLSWRlaDKw81WsVKfJlUcN9WA/uypMOOQb6MXolZEYMRm+EDgEFT0oUQIDAQABAoIBAGDUC8ZFHdxSSR06ELmo55HhBw52j8kq/n/B4nCpBI4cTPwErrKpXvuqvYTNCINFSSuiHObLvEw2yJggSjZRCJXoptg0+57RmXn51zKCG+X0T5qfz6xNAVEuUmibGEEW/X+ACyY8CmeLxpF7c1fI2T3RhUclsgpCi+REvWCHyvNXYIj5S8c3+KBXGzdmmwv97PTtkkMW4K2QDG9wcimZU/stUrtsx8vBm9T4TmDz3BiXnCHoJDJCx5j9R8MlrT2hD2HdwxjZzMNBveHNdDK2mWrCVnzNzJiUnBPxh0ObJU+Qf9g32Gmyn/8QZbRDDiN6az+NWiMJx/ikPQTB4QWdYckCgYEA8YSEAVCU8bTmRDS//uOB6sGzt1KDG3fmTuSuupoJT4FC7UiUg0eo3ZfvqHrowkVccEgRgKCtUpWNrR1SzheXvc8QajeLqGP3dk5vI2naseqCE1nhqW0i8BRezjrPiWxkzG4aNO2BLTco+ZV7AJYW0E0hMW2YDu9kqrCRzV2PVZcCgYEAwNTvwwEFW2LZwv8gzITqg9S71phz0u4DSvkvlm57FO2G/RWwp5S7ukvLPgOgVHVymFsfUQy2J0HcvqFysvyJulj+PHK9onddnm8msrLsUG66ui1QrIUP06p2v98IaG2jKJxFCX79XPr67UgKXq8kX/s491NkHq774pdUy9InvlcCgYBnp2b8JXh3MBtvhHAuVbgpZ87Yy/nm7ROUIoN3JKsAS0rNCcxrd3La/91korOIxToCGnwgh1U7z2HJvX8PYoLGfLrfy00ODTFkvg7m1QR+PVZsNbQrAeLvxN5XhlgR88pjDpICyzgYjsbwLx5mRwQtjBzF2PJc3pOGylcZG6FrqwKBgQCUoQwU0Cqi37RdGmzbdu+ToVsO8v8Da7VaCmtllc6EuPg9BoTdBkUUOOt05zKjJsunJ0UiIZwc8iUFQke4MfKukX2UdhQ4r6yXO7EmN8bx0AdZDSiLcRxb154kEfLXGvqRiLGluh3rlv/l+IsVpAVzfZ3Q9JPNGq7HXkFbwKYljQKBgGO9dozcq7hspOEsM+tlN2Z21SxM3mK+udOntXw+xD3iSbksiI9GOmHs09Np/95TN21lwl1h7r40/5rxC6s0cfrPd+7c0QhsgM9yOE1onkazY5DLmGDaW979HZLuB1y4V2ksw2ttJfirRtHPYRBcHS/XVmEzTUGk4zfqA4lkQ15x
-----END RSA PRIVATE KEY-----"""

import djcelery  ## import the djcelery package
djcelery.setup_loader()  ## load task modules
BROKER_URL = 'redis://127.0.0.1:6379/1'  ## broker: Redis
CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/2'
CELERY_IMPORTS = ('CeleryTask.tasks',)  ## task module(s)
CELERY_TIMEZONE = 'Asia/Shanghai'  ## Celery timezone, kept consistent with Django
## django-celery scheduler (fixed value)
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'

from celery.schedules import timedelta, crontab

CELERYBEAT_SCHEDULE = {
    u'test task': {
        "task": "CeleryTask.tasks.test",  ## task function
        "schedule": timedelta(seconds=1)  ## run once per second
    }
}
Example #17
from celery.schedules import timedelta

CELERY_IMPORTS = ('automated_tasks',)
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'create_contact': {
        'task': 'automated_tasks.create_random_contact_15sec',
        'schedule': timedelta(seconds=15),
    },
    'delete_contact': {
        'task': 'automated_tasks.delete_task_older_1min',
        'schedule': timedelta(minutes=1),
    }
}
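
To exercise this schedule locally, one would typically start a worker with the beat scheduler embedded, e.g. celery -A automated_tasks worker -B --loglevel=info; this sketch assumes the settings above are loaded by an app module named automated_tasks.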
Example #18
from celery.decorators import periodic_task
from celery.schedules import timedelta

from apps.auth.models import CustomUser
from utils.metrics import Gauge


@periodic_task(run_every=timedelta(seconds=5))
def gauge_auth():
    Gauge('auth.CustomUser').report(CustomUser.objects.count())
Example #19
import logging

from celery.schedules import timedelta
from celery.task import periodic_task
from django.apps import apps
from django.contrib.auth import get_user_model
from django.db import transaction
from django.utils import timezone
from scoop.user.access.models import UserIP
from scoop.user.models.activation import Activation

logger = logging.getLogger(__name__)
User = get_user_model()


@periodic_task(run_every=timedelta(minutes=2), options={'expires': 15})
@transaction.atomic()
def clean_online_list():
    """ Mettre à jour la liste des utilisateurs en ligne """
    User._clean_online_list()
    User.get_online_users().update(last_online=timezone.now())
    return True


@periodic_task(run_every=timedelta(days=1), options={'expires': 3600})
def rebuild_users():
    """ Assurer l'intégrité des liens de clés étrangères """
    # Assigner les villes non définies, à celles des IP
    if apps.is_installed('scoop.location'):
        users = User.objects.filter(profile__city__isnull=True)
        for user in users:
Example #20
from datetime import timedelta

# Import the modules where the tasks live
imports = [
    "regular_celery.tasks.task1",  # import the py module
    "regular_celery.tasks.task2",
]

# Task arguments
v1 = 'sss'
v2 = "aaa"

# Configuration of the tasks to schedule
beat_schedule = {
    "task1": {
        "task": "regular_celery.tasks.task1.celery_run",  # function to execute
        # "schedule": crontab(minute="*/1"),   # every minute
        "schedule": timedelta(seconds=3),  # every 3 seconds
        "args": (v1, v2)  # task function arguments
    },
    # "task2": {
    #     "task": "regular_celery.tasks.task2.celery_run",
    #     # "schedule": crontab(minute=0, hour="*/1"),   # every hour
    #     "schedule": timedelta(seconds=5),
    #     "args": ()
    # },
}

# Asynchronous tasks
# # Import Queue
# from kombu import Queue
# # Import the modules containing the tasks: every function decorated with
# # celery.task needs its module imported here.
# # The test functions created earlier live in handlers.async_tasks and handlers.schedules
Example #21
MIIEpAIBAAKCAQEAnIIYur27kzgkV51p14bNhr/lN8eDUIIOc1+189LCo8rLNb9WYC8q+RypvFFf1uiK8ujeu+1ynLR0OBGwBgx1vzsWyfsg97XeHobfwbrPUmUI9jbYFsk6UD+7eZl7TfAL/ERmpCkJWliKIEcSWWAcD4uxDT/baZ+6hoRja4nH4tBCBzBPWYh4Qut9E0t7jMKCCd46SU7M4WNcOInlRTzu6mfF8LqRhXyGMt2oIj916W9B1eiFHiJ+61/rEghm0Li4kv4vNnac52IE04TXy+8CtksWJ47DFTOcYH2u8wFOBSU3GY2wKzI7yogIzwHgLqK5GT7wkHAQckpn70qazjr2tQIDAQABAoIBABa/ukR6i6dMg8vQb7AKQhmSDwlakLXFEcCnatU0D2KreXoog6+ba42mIu3ijiG4z2mbe7SpQP2SJUp5F7LpYLwZJKjbPeGDp/Ob+y43ryb01KalNiepvDYp7WAxdQDRIYzbjGfUJy3grMMgUYR4OdvwnB2m6Iej1gLzf1gEQO+wx5Q3b8J3OQPf4iLlDggpzx4KnGQlUUnRyWrH3qqsnF+DY5HPPc5P2BwHCfsFmmolVwSBqoRoXB8tFCZMXI6s8/R+TcHtLOdPM8bOEGwqHpS+wFRDEKFXqb5/nMaW+udNfYvsEflGEReqSMZsyzXbxueYNaCLwVyIoM80872HH8kCgYEAzGzOiLKnEZVCX7zR4YJqIMuNe1goHQHjLZFynIovNdz3bFMfXlmy8Xd3WJfx0PKZrKZVPG7opZRoeJMCD6Hx2O/0wN9KcS60aCaiNZJnSKTMrovQjUqyKxALK0DiRKSL8JdTHq+qr9E25Mwc9DVdvUvqVFdNCvUh9hNti5/rsR8CgYEAw/58iv6fvETUHMeHLMrfoNS1Z1Ahit025Bbnu3eepu+rSDkTjRpUL1BNsa6KVzK3POHyA3SeEvg7IjbGMlZ0rS7GFBeQY0iOyRIYq7tesoU6+e+bCQIgZiFhtf+GPucC0B5SfSE7e+kaF1yOjyJOXnIlAsDdvkP4hP9X5qRseasCgYEAmyDytk+EctZmsQoz50K1UL/HVNO4VRLql9jpNZuzadeONzj48/tzzMPQ4H0lt19yeM8cnai4iXaOtPkyNjS5t9uYS4jnD+7WXrb6n1bDZCATZ12YXLBTdlRNdXxeeKK5w1DCdeXuzE8irguq6TNaOF1UrL43K9qL9BYYKj2oeRcCgYAIT5NCZZeqaRTBf6h4usWO0VY74kb513WLaHk9Fs5wb7tInbr5gcNOGk6hGTCej/T7LO2RPfGyBjqjscTnv4jFCzW1BmbF/v6nAhBvv8s9MK8WiBV/5Uowanv1NreflTYmUxLWYYFfOLw1f2RAJ4lBMf/lxP3iIom4QgedLR24bwKBgQCuc0zxttiMSrWHBHtJDOo9pJV3rSngl1iMELqe197LIm7946M5IFmCL6hJcoOo4oudiV0vbAHD9ndrrZrrXNPnL2s79O6appFCG7y3yJS49slTSdqVYnSn8T1yS+7/l3c/pWgaz6j6KP7nUcgsgkSPJBo7B7KTr+gGz31cVsjFzQ==
-----END RSA PRIVATE KEY-----"""

ERROR_PATH = os.path.join(BASE_DIR, "error.log")

# DingTalk bot
DING_URL = """https://oapi.dingtalk.com/robot/send?access_token=a83286c4644275f5f2c3095144b7453819a322b3583d0a119f599fc0ac62ef48"""
# Celery configuration
import djcelery


djcelery.setup_loader()  # load task modules
BROKER_URL = 'redis://127.0.0.1:6379/1'  # broker address: Redis database
CELERY_IMPORTS = ('CeleryTask.tasks',)  # task module(s)
CELERY_TIMEZONE = 'Asia/Shanghai'  # Celery timezone
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'  # celery scheduler (fixed value)



from celery.schedules import crontab
from celery.schedules import timedelta

CELERYBEAT_SCHEDULE = {
    u"test task": {
        "task": "CeleryTask.tasks.sendDing",
        "schedule": timedelta(seconds=10)
    }
}


Example #22
    """
    from scoop.messaging.models import Thread, Message
    # Date en-dessous de laquelle supprimer les sujets
    if hasattr(settings, 'MESSAGING_PRUNE_MONTHS'):
        months = settings.MESSAGING_PRUNE_MONTHS
    when = DatetimeModel.get_last_days(months * 30, timestamp=False)
    # Trouver les sujets dont la date de modification n'excède pas when
    threads = Thread.objects.filter(updated__lt=when)
    threads.update(deleted=True)
    # Trouver les messages orphelins
    messages = Message.objects.filter(
        Q(thread__deleted=True) | Q(thread__isnull=True))
    messages.update(deleted=True)


@periodic_task(run_every=timedelta(days=1), options={'expires': 30})
def prune_alerts():
    """ Effacer les alertes lues il y a plus de n jours """
    from scoop.messaging.models.alert import Alert
    # Supptimer les alertes
    alerts = Alert.objects.read_since(minutes=2880)
    alerts.delete()


@periodic_task(run_every=timedelta(seconds=150), options={'expires': 10})
def send_mail_queue_non_forced():
    """ Expédier les mails de la file d'attente """
    from scoop.messaging.models.mailevent import MailEvent
    # Traiter séparément les catégories
    return MailEvent.objects.process(forced=False)
Example #23
import djcelery

djcelery.setup_loader()  # load task modules
BROKER_URL = 'redis://127.0.0.1:6379/1'  # broker address: Redis database
CELERY_IMPORTS = ('CeleryTask.tasks',)  # task module(s)
CELERY_TIMEZONE = 'Asia/Shanghai'  # Celery timezone
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'  # celery scheduler

# Celery beat schedules
from celery.schedules import crontab
from celery.schedules import timedelta

CELERYBEAT_SCHEDULE = {  # beat schedule
    # scheduled task 1: run every 30 s
    u'test timer 1': {
        'task': 'CeleryTask.tasks.taskExample',
        # 'schedule': crontab(minute='*/2'),
        'schedule': timedelta(seconds=30),
        'args': (),
    },
    u'wake-up call service': {
        'task': 'CeleryTask.tasks.DingTalk',
        # 'schedule': crontab(minute='*/2'),
        'schedule': timedelta(seconds=3),
        'args': (),
    },
}
}

# The cache backends to use.
# CACHES = {
#     'default': {
#         'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',  # local-memory cache by default
#     }
Example #24
from settings import DATABASES, TIME_ZONE

import djcelery
djcelery.setup_loader()
BROKER_URL = "django://"
BROKER_BACKEND = "djkombu.transport.DatabaseTransport"
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
BROKER_HOST = DATABASES['default']['HOST']
BROKER_PORT = DATABASES['default']['PORT']
DATABASE_USER = BROKER_USER = DATABASES['default']['USER']
DATABASE_PASSWORD = BROKER_PASSWORD = DATABASES['default']['PASSWORD']
DATABASE_ENGINE = DATABASES['default']['ENGINE']
DATABASE_NAME = DATABASES['default']['NAME']
CELERY_RESULT_DBURI = DATABASES['default']
CELERY_TIMEZONE = TIME_ZONE
CELERY_IMPORTS = ("puzzle.tasks",)

from celery.schedules import crontab, timedelta
CELERYBEAT_SCHEDULE = {
    "save_sessions": {
        "task": "puzzle.tasks.save_cached_sessions",
        "schedule": timedelta(minutes=1),
    }
}
Example #25
# CELERY_FORCE_EXECV = True  # can prevent deadlocks in some cases
# CELERY_QUEUES  # Celery queue definitions
CELERY_TASK_RESULT_EXPIRES = 50  # 60 * 60 * 24  # expiry of task results in the Redis backend configured above
# Time limit for completing a task
# CELERYD_TASK_TIME_LIMIT = 5  # 15 * 60  # finish within 15 minutes, otherwise the worker running the task is killed and the task is handed back to the parent process

# BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3}  # if no acknowledge arrives within this window after dispatch, the task is given to another worker

# Periodic tasks are dispatched by celery beat, which by default follows the
# timezone configured in settings.py.
# Celery scheduled-task definitions
from datetime import timedelta

CELERYBEAT_SCHEDULE = {
    # scheduled task 1: run every 5 s
    'task1': {
        # "task": "apps.orders.tasks.order_to_mission",
        "task": "app1.tasks.add",
        "schedule": timedelta(seconds=5),
        "args": (100, 200),
    },
    # scheduled task 2: run every 1 s
    'task2': {
        "task": "app1.tasks.add1",
        "schedule": timedelta(seconds=1),
        "args": (10, 20),
    },
}

from django.core.management.base import BaseCommand

CRONJOBS = [
    # ('*/1 * * * *', '<app>.<module with the cron function>.<function name>', '>> <output file path>')
    # run restart_pm2 every minute and append all of its output to crontab.log
Example #26
def clean_transient_pictures():
    """ Supprimer les images volatiles de plus de 24 heures """
    limit = timezone.now() - timedelta(hours=2)
    pictures = Picture.objects.filter(transient=True, updated__lt=limit)
    for picture in pictures:
        picture.delete(clear=True)
Example #27
        },
        'imagery_process': {
            'task': 'imagery.tasks.process_all',
            'schedule': crontab(minute=30, hour='5,13,22')
        },
        'imagery_not_found_scenes_alert': {
            'task': 'imagery.tasks.not_found_scenes_alert',
            'schedule': crontab(minute=59, hour='23')
        },
        'sentinel_query_and_download': {
            'task': 'sentinel_catalog.tasks.query_and_download_all',
            'schedule': crontab(minute=0, hour='20, 4')
        },
        'sentinel_extract_all': {
            'task': 'sentinel_catalog.tasks.extract_all',
            'schedule': timedelta(hours=2)
        },
        'sentinel_process_all': {
            'task': 'sentinel_catalog.tasks.process_all',
            'schedule': timedelta(hours=3)
        },
        'sentinel_remove_expired_scenes': {
            'task': 'sentinel_catalog.tasks.remove_expired_scenes',
            'schedule': crontab(hour='1')
        }
    },
)

if __name__ == '__main__':
    app.start()
Example #28
def configure_celery(flask_app, celery, test_config=None):
    database_url = shared_config["db"]["url"]
    engine_args_literal = ast.literal_eval(
        shared_config["db"]["engine_args_literal"])
    redis_url = shared_config["redis"]["url"]

    if test_config is not None:
        if "db" in test_config:
            if "url" in test_config["db"]:
                database_url = test_config["db"]["url"]

    ipld_interval = int(
        shared_config["discprov"]["blacklist_block_indexing_interval"])
    # default is 5 seconds
    indexing_interval_sec = int(
        shared_config["discprov"]["block_processing_interval_sec"])

    # Update celery configuration
    celery.conf.update(
        imports=[
            "src.tasks.index", "src.tasks.index_blacklist",
            "src.tasks.index_plays", "src.tasks.index_metrics",
            "src.tasks.index_materialized_views",
            "src.tasks.index_network_peers", "src.tasks.index_trending",
            "src.tasks.cache_user_balance", "src.monitors.monitoring_queue",
            "src.tasks.cache_trending_playlists",
            "src.tasks.index_solana_plays", "src.tasks.index_aggregate_views"
        ],
        beat_schedule={
            "update_discovery_provider": {
                "task": "update_discovery_provider",
                "schedule": timedelta(seconds=indexing_interval_sec),
            },
            "update_ipld_blacklist": {
                "task": "update_ipld_blacklist",
                "schedule": timedelta(seconds=ipld_interval),
            },
            "update_play_count": {
                "task": "update_play_count",
                "schedule": timedelta(seconds=60)
            },
            "update_metrics": {
                "task": "update_metrics",
                "schedule": crontab(minute=0, hour="*")
            },
            "aggregate_metrics": {
                "task": "aggregate_metrics",
                "schedule": timedelta(minutes=METRICS_INTERVAL)
            },
            "synchronize_metrics": {
                "task": "synchronize_metrics",
                "schedule": crontab(minute=0, hour=1)
            },
            "update_materialized_views": {
                "task": "update_materialized_views",
                "schedule": timedelta(seconds=300)
            },
            "update_network_peers": {
                "task": "update_network_peers",
                "schedule": timedelta(seconds=30)
            },
            "index_trending": {
                "task": "index_trending",
                "schedule": crontab(minute=15, hour="*")
            },
            "update_user_balances": {
                "task": "update_user_balances",
                "schedule": timedelta(seconds=60)
            },
            "monitoring_queue": {
                "task": "monitoring_queue",
                "schedule": timedelta(seconds=60)
            },
            "cache_trending_playlists": {
                "task": "cache_trending_playlists",
                "schedule": timedelta(minutes=30)
            },
            "index_solana_plays": {
                "task": "index_solana_plays",
                "schedule": timedelta(seconds=5)
            },
            "update_aggregate_user": {
                "task": "update_aggregate_user",
                "schedule": timedelta(seconds=30)
            },
            "update_aggregate_track": {
                "task": "update_aggregate_track",
                "schedule": timedelta(seconds=30)
            },
            "update_aggregate_playlist": {
                "task": "update_aggregate_playlist",
                "schedule": timedelta(seconds=30)
            }
        },
        task_serializer="json",
        accept_content=["json"],
        broker_url=redis_url,
    )

    # Initialize DB object for celery task context
    db = SessionManager(database_url, engine_args_literal)
    logger.info('Database instance initialized!')
    # Initialize IPFS client for celery task context
    ipfs_client = IPFSClient(shared_config["ipfs"]["host"],
                             shared_config["ipfs"]["port"])

    # Initialize Redis connection
    redis_inst = redis.Redis.from_url(url=redis_url)
    # Clear existing locks used in tasks if present
    redis_inst.delete("disc_prov_lock")
    redis_inst.delete("network_peers_lock")
    redis_inst.delete("materialized_view_lock")
    redis_inst.delete("update_metrics_lock")
    redis_inst.delete("update_play_count_lock")
    redis_inst.delete("ipld_blacklist_lock")
    redis_inst.delete("update_discovery_lock")
    redis_inst.delete("aggregate_metrics_lock")
    redis_inst.delete("synchronize_metrics_lock")
    logger.info('Redis instance initialized!')

    # Initialize custom task context with database object
    class DatabaseTask(Task):
        def __init__(self, *args, **kwargs):
            self._db = db
            self._web3_provider = web3
            self._abi_values = abi_values
            self._shared_config = shared_config
            self._ipfs_client = ipfs_client
            self._redis = redis_inst
            self._eth_web3_provider = eth_web3
            self._solana_client = solana_client

        @property
        def abi_values(self):
            return self._abi_values

        @property
        def web3(self):
            return self._web3_provider

        @property
        def db(self):
            return self._db

        @property
        def shared_config(self):
            return self._shared_config

        @property
        def ipfs_client(self):
            return self._ipfs_client

        @property
        def redis(self):
            return self._redis

        @property
        def eth_web3(self):
            return self._eth_web3_provider

        @property
        def solana_client(self):
            return self._solana_client

    celery.autodiscover_tasks(["src.tasks"], "index", True)

    # Subclassing celery task with discovery provider context
    # Provided through properties defined in 'DatabaseTask'
    celery.Task = DatabaseTask

    celery.finalize()
Example #29
# Celery configuration
from datetime import timedelta

import djcelery

djcelery.setup_loader()
BROKER_URL = 'redis://127.0.0.1:6379/5'
CELERY_IMPORTS = ('art.tasks',)  # task module(s) to import
CELERY_TIMEZONE = 'Asia/Shanghai'  # timezone
# Where the scheduler stores the periodic task queue
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'

# Scheduled tasks
CELERYBEAT_SCHEDULE = {
    u'periodic email': {
        'task': 'art.tasks.sendEmailLog',
        'schedule': timedelta(seconds=10),
        'args': (),
    }
}


# -- Logging configuration --
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'simple': {
            'format': '[%(asctime)s]->%(module)s/%(funcName)s:%(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S'
        },
    },
Example #30
# coding: utf-8
from celery.schedules import timedelta
from celery.task import periodic_task


@periodic_task(run_every=timedelta(days=1))
def prune_reads():
    """ TODO: Supprimer les marques de lecture expirées """
    pass
Example #31
def configure_celery(flask_app, celery, test_config=None):
    database_url = shared_config["db"]["url"]
    engine_args_literal = ast.literal_eval(shared_config["db"]["engine_args_literal"])
    redis_url = shared_config["redis"]["url"]

    if test_config is not None:
        if "db" in test_config:
            if "url" in test_config["db"]:
                database_url = test_config["db"]["url"]

    # Update celery configuration
    celery.conf.update(
        imports=["src.tasks.index", "src.tasks.index_blacklist", "src.tasks.index_cache"],
        beat_schedule={
            "update_discovery_provider": {
                "task": "update_discovery_provider",
                "schedule": timedelta(seconds=5),
            },
            "update_ipld_blacklist": {
                "task": "update_ipld_blacklist",
                "schedule": timedelta(seconds=60),
            },
            "update_cache": {
                "task": "update_discovery_cache",
                "schedule": timedelta(seconds=60)
            }
        },
        task_serializer="json",
        accept_content=["json"],
        broker_url=redis_url,
    )

    # Initialize DB object for celery task context
    db = SessionManager(database_url, engine_args_literal)

    # Initialize IPFS client for celery task context
    gateway_addrs = shared_config["ipfs"]["gateway_hosts"].split(',')
    gateway_addrs.append(shared_config["discprov"]["user_metadata_service_url"])
    logger.warning(f"__init__.py | {gateway_addrs}")
    ipfs_client = IPFSClient(
        shared_config["ipfs"]["host"], shared_config["ipfs"]["port"], gateway_addrs
    )

    # Initialize Redis connection
    redis_inst = redis.Redis.from_url(url=redis_url)

    # Clear existing lock if present
    redis_inst.delete("disc_prov_lock")

    # Initialize custom task context with database object
    class DatabaseTask(Task):
        def __init__(self, *args, **kwargs):
            self._db = db
            self._web3_provider = web3
            self._abi_values = abi_values
            self._shared_config = shared_config
            self._ipfs_client = ipfs_client
            self._redis = redis_inst

        @property
        def abi_values(self):
            return self._abi_values

        @property
        def web3(self):
            return self._web3_provider

        @property
        def db(self):
            return self._db

        @property
        def shared_config(self):
            return self._shared_config

        @property
        def ipfs_client(self):
            return self._ipfs_client

        @property
        def redis(self):
            return self._redis

    celery.autodiscover_tasks(["src.tasks"], "index", True)

    # Subclassing celery task with discovery provider context
    # Provided through properties defined in 'DatabaseTask'
    celery.Task = DatabaseTask

    celery.finalize()
Example #32
USE_TZ = False

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/

STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, "static")

# STATIC_ROOT = os.path.join(BASE_DIR,"static")
import djcelery

djcelery.setup_loader()
BROKER_URL = 'redis://127.0.0.1:6379/1'
CELERY_IMPORTS = ('CeleryTask.tests',)
CELERY_TIMEZONE = 'Asia/Shanghai'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'

from celery.schedules import timedelta

CELERYBEAT_SCHEDULE = {
    u'test 1': {
        'task': 'CeleryTask.tests.add',
        'schedule': timedelta(seconds=1)
    }
}
# ckeditor
CKEDITOR_UPLOAD_PATH = 'upload'
CKEDITOR_IMAGE_BACKEND = 'pillow'
PAZE_SIZE = 1
Example #33
__mtime__ = '16/5/17'
"""

import time
import datetime
from src.base import BaseSpider
from src.config import SinaConfig
from celery.task import task
from celery.task import periodic_task
from celery.schedules import timedelta
from celery.schedules import crontab


# Test celery crontab scheduling and timedelta polling
# @periodic_task(run_every=crontab(hour=11, minute=20))
# def test_crontab():
#     print(datetime.datetime.now())
#
#
# @periodic_task(run_every=timedelta(minutes=1))
# def test_timedelta():
#     print("hello timedelta: %s" % datetime.datetime.now())


@periodic_task(run_every=timedelta(minutes=5))
def spider_sina_news():
    start = time.time()
    spider = BaseSpider(config=SinaConfig)
    spider.run()
    end = time.time()
    print "spider sina news costs:%ds" % (end - start)
Example #34
from celery import Celery
from celery.schedules import timedelta
from decouple import config
import random
import custom_utils
from peewee import SqliteDatabase, Model, CharField, PrimaryKeyField
# import requests
# from bs4 import BeautifulSoup
from pyrogram import Client, InlineKeyboardMarkup, InlineKeyboardButton

url = config('REDIS_URL')
app = Celery('main', broker=url)
app.conf.timezone = 'Europe/Kiev'
app.conf.beat_schedule = {
    'send_film': {
        'task': 'task.film',
        'schedule': timedelta(hours=3)
    }
}

database = SqliteDatabase('films.sqlite3')
database.connect()


class Film(Model):
    id = PrimaryKeyField()
    url = CharField(max_length=250)

    class Meta:
        database = database

Example #35
import os

from config import github_cve_monitor_job_time, hardware_info_monitor_job_time, nist_update_job_time
from celery import Celery
from celery.schedules import crontab, timedelta
from django.conf import settings

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Web.settings')

app = Celery('Medusa')  # the Redis broker has to be specified here, otherwise it defaults to RabbitMQ

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.conf.beat_schedule = {
    'HardwareInfoMonitor': {
        'task': 'Web.SystemInfo.HardwareInfo.Monitor',
        'schedule': timedelta(seconds=hardware_info_monitor_job_time),
    },
    'GithubCveMonitor': {
        'task': 'Web.CVE.GithubMonitoring.Github.Monitor',
        'schedule': timedelta(seconds=github_cve_monitor_job_time),
    },
    'NistMonitor': {
        'task': 'Web.CVE.NistMonitoring.NistUpdate.NistUpdateDownload',
        'schedule': timedelta(seconds=nist_update_job_time),
    },
}
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
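
Given namespace='CELERY' above, the matching keys in Web/settings.py carry a CELERY_ prefix. A minimal sketch, assuming a local Redis broker (the URL and values are assumptions, not from the source):

CELERY_BROKER_URL = 'redis://127.0.0.1:6379/0'  # assumed broker URL
CELERY_TIMEZONE = 'Asia/Shanghai'  # assumed to match the deployment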