Example #1
 def test_crontab_spec_hour_formats(self):
     c = crontab(hour=6)
     self.assertEqual(c.hour, set([6]))
     c = crontab(hour='5')
     self.assertEqual(c.hour, set([5]))
     c = crontab(hour=(4, 8, 12))
     self.assertEqual(c.hour, set([4, 8, 12]))
Example #2
def setup_periodic_tasks(sender, **kwargs):
    sender.add_periodic_task(
        3600,
        billing_check.s(),
        name='billing-check',
    )
    sender.add_periodic_task(
        3600 * 24,
        billing_alert.s(),
        name='billing-alert',
    )
    sender.add_periodic_task(
        3600 * 24,
        billing_notify.s(),
        name='billing-notify',
    )
    sender.add_periodic_task(
        crontab(hour=1, minute=0, day_of_week='monday,thursday'),
        perform_removal.s(),
        name='perform-removal',
    )
    sender.add_periodic_task(
        crontab(hour=2, minute=0, day_of_week='monday,thursday'),
        schedule_removal.s(),
        name='schedule-removal',
    )
    sender.add_periodic_task(
        crontab(hour=2, minute=30, day_of_week='monday,thursday'),
        notify_expired.s(),
        name='notify-expired',
    )
Example #3
def conf_frontend_celery(app):
    broker = get_frontend_broker_uri()
    queue = frontend_config.broker_frontend.queue
    _conf_celery(app, broker, queue=queue)
    # add celerybeat conf only for frontend app
    cron_age_cfg = frontend_config['cron_clean_file_age']
    cron_size_cfg = frontend_config['cron_clean_file_size']
    app.conf.update(
        CELERYBEAT_SCHEDULE={
            # File System clean according to file max age
            'clean_fs_age': {
                'task': 'frontend_app.clean_fs_age',
                'schedule': crontab(
                    hour=cron_age_cfg['clean_fs_age_cron_hour'],
                    minute=cron_age_cfg['clean_fs_age_cron_minute'],
                    day_of_week=cron_age_cfg['clean_fs_age_cron_day_of_week']
                ),
                'args': (),
            },
            # File System clean according to sum max size
            'clean_fs_size': {
                'task': 'frontend_app.clean_fs_size',
                'schedule': crontab(
                    hour=cron_size_cfg['clean_fs_size_cron_hour'],
                    minute=cron_size_cfg['clean_fs_size_cron_minute'],
                    day_of_week=cron_size_cfg['clean_fs_size_cron_day_of_week']
                ),
                'args': (),
            },
        },
        CELERY_TIMEZONE='UTC'
    )
Example #4
    def __init__(self, record):
        self.name = record.name
        self.task = record.task
        if record.interval:
            i = record.interval
            self.schedule = schedules.schedule(timedelta(**{i.period: i.every}))
        else:
            i = record.crontab
            self.schedule = schedules.crontab(minute=i.minute,
                                              hour=i.hour,
                                              day_of_week=i.day_of_week)
        try:
            self.args = deserialize(record.args or u"[]")
            self.kwargs = deserialize(record.kwargs or u"{}")
        except ValueError:
            record.update_record(no_changes = True, enabled = False)
            db(db.celery_periodictasks).update(last_update=datetime.now())
            db.commit()
            raise

        self.options = {"queue": record.queue,
                        "exchange": record.exchange,
                        "routing_key": record.routing_key,
                        "expires": record.expires}
        self.total_run_count = record.total_run_count or 0
        self.record = record        
        if not record.last_run_at:
            record.update_record(last_run_at = datetime.now())
            db(db.celery_periodictasks).update(last_update=datetime.now())
            db.commit()
        self.last_run_at = record.last_run_at
Example #5
 def test_crontab_spec_dom_formats(self):
     c = crontab(day_of_month=5)
     self.assertEqual(c.day_of_month, set([5]))
     c = crontab(day_of_month='5')
     self.assertEqual(c.day_of_month, set([5]))
     c = crontab(day_of_month='2,4,6')
     self.assertEqual(c.day_of_month, set([2, 4, 6]))
     c = crontab(day_of_month='*/5')
     self.assertEqual(c.day_of_month, set([1, 6, 11, 16, 21, 26, 31]))
Example #6
 def test_crontab_spec_minute_formats(self):
     c = crontab(minute=30)
     self.assertEqual(c.minute, set([30]))
     c = crontab(minute='30')
     self.assertEqual(c.minute, set([30]))
     c = crontab(minute=(30, 40, 50))
     self.assertEqual(c.minute, set([30, 40, 50]))
     c = crontab(minute=set([30, 40, 50]))
     self.assertEqual(c.minute, set([30, 40, 50]))
Example #7
def setup_periodic_tasks(sender, **kwargs):
    sender.add_periodic_task(
        3600,
        commit_pending.s(),
        name='commit-pending',
    )
    sender.add_periodic_task(
        crontab(hour=3, minute=30),
        update_remotes.s(),
        name='update-remotes',
    )
    sender.add_periodic_task(
        3600 * 24,
        repository_alerts.s(),
        name='repository-alerts',
    )
    sender.add_periodic_task(
        3600 * 24,
        component_alerts.s(),
        name='component-alerts',
    )
    sender.add_periodic_task(
        3600 * 24,
        cleanup_suggestions.s(),
        name='suggestions-cleanup',
    )
    sender.add_periodic_task(
        3600 * 24,
        cleanup_stale_repos.s(),
        name='cleanup-stale-repos',
    )
    sender.add_periodic_task(
        3600 * 24,
        cleanup_old_suggestions.s(),
        name='cleanup-old-suggestions',
    )
    sender.add_periodic_task(
        3600 * 24,
        cleanup_old_comments.s(),
        name='cleanup-old-comments',
    )

    # Following fulltext maintenance tasks should not be
    # executed at same time
    sender.add_periodic_task(
        crontab(hour=2, minute=30, day_of_week='saturday'),
        cleanup_fulltext.s(),
        name='fulltext-cleanup',
    )
    sender.add_periodic_task(
        crontab(hour=2, minute=30, day_of_week='sunday'),
        optimize_fulltext.s(),
        name='fulltext-optimize',
    )
Example #8
 def test_crontab_spec_moy_formats(self):
     c = crontab(month_of_year=1)
     self.assertEqual(c.month_of_year, set([1]))
     c = crontab(month_of_year='1')
     self.assertEqual(c.month_of_year, set([1]))
     c = crontab(month_of_year='2,4,6')
     self.assertEqual(c.month_of_year, set([2, 4, 6]))
     c = crontab(month_of_year='*/2')
     self.assertEqual(c.month_of_year, set([1, 3, 5, 7, 9, 11]))
     c = crontab(month_of_year='2-12/2')
     self.assertEqual(c.month_of_year, set([2, 4, 6, 8, 10, 12]))
Example #9
def init_app(app):
    celery.main = app.import_name
    celery.conf.update(app.config)

    celery.conf['CELERYBEAT_SCHEDULE'] = {
        'bump-metrics-every-nights': {
            'task': 'bump-metrics',
            'schedule': crontab(hour=0, minute=0),
        },
        'purge-datasets-every-nights': {
            'task': 'purge-datasets',
            'schedule': crontab(hour=0, minute=0),
        },
        'purge-reuses-every-nights': {
            'task': 'purge-reuses',
            'schedule': crontab(hour=0, minute=0),
        },
        'purge-organizations-every-nights': {
            'task': 'purge-organizations',
            'schedule': crontab(hour=0, minute=0),
        },
    }

    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)
    celery.Task = ContextTask

    # Load core tasks
    import udata.core.metrics.tasks
    import udata.core.storages.tasks
    # import udata.core.search.tasks
    import udata.core.activity.tasks
    import udata.core.dataset.tasks
    import udata.core.reuse.tasks
    import udata.core.organization.tasks

    # Load plugins tasks
    for plugin in app.config['PLUGINS']:
        name = 'udata.ext.{0}.tasks'.format(plugin)
        try:
            __import__(name)
        except ImportError:
            pass
        except Exception as e:
            log.error('Error importing %s: %s', name, e)

    return celery
Example #10
File: core.py Project: pudo/aleph
def create_app(config={}):
    app = Flask('aleph')
    app.config.from_object(settings)
    app.config.update(config)

    if 'postgres' not in settings.DATABASE_URI:
        raise RuntimeError("aleph database must be PostgreSQL!")

    app.config.update({
        'SQLALCHEMY_DATABASE_URI': settings.DATABASE_URI,
        'BABEL_DOMAIN': 'aleph'
    })

    queue = Queue(settings.QUEUE_NAME,
                  routing_key=settings.QUEUE_ROUTING_KEY,
                  queue_arguments={'x-max-priority': 9})
    celery.conf.update(
        imports=('aleph.queues'),
        broker_url=settings.BROKER_URI,
        task_always_eager=settings.EAGER,
        task_eager_propagates=True,
        task_ignore_result=True,
        task_acks_late=True,
        task_queues=(queue,),
        task_default_queue=settings.QUEUE_NAME,
        task_default_routing_key=settings.QUEUE_ROUTING_KEY,
        worker_max_tasks_per_child=1000,
        result_persistent=False,
        beat_schedule={
            'hourly': {
                'task': 'aleph.logic.scheduled.hourly',
                'schedule': crontab(hour='*', minute=0)
            },
            'daily': {
                'task': 'aleph.logic.scheduled.daily',
                'schedule': crontab(hour=5, minute=0)
            }
        },
    )

    migrate.init_app(app, db, directory=settings.ALEMBIC_DIR)
    configure_oauth(app)
    mail.init_app(app)
    db.init_app(app)
    babel.init_app(app)
    CORS(app, origins=settings.CORS_ORIGINS)

    # This executes all registered init-time plugins so that other
    # applications can register their behaviour.
    for plugin in get_extensions('aleph.init'):
        plugin(app=app)
    return app
Example #11
    def test_run_all_due_tasks_after_restart(self):
        scheduler_class, shelve = create_persistent_scheduler_w_call_logging()

        shelve['tz'] = 'UTC'
        shelve['utc_enabled'] = True
        shelve['__version__'] = __version__
        cur_seconds = 20

        def now_func():
            return datetime(2018, 1, 1, 1, 11, cur_seconds)
        app_schedule = {
            'first_missed': {'schedule': crontab(
                minute='*/10', nowfun=now_func), 'task': 'first_missed'},
            'second_missed': {'schedule': crontab(
                minute='*/1', nowfun=now_func), 'task': 'second_missed'},
            'non_missed': {'schedule': crontab(
                minute='*/13', nowfun=now_func), 'task': 'non_missed'}
        }
        shelve['entries'] = {
            'first_missed': beat.ScheduleEntry(
                'first_missed', 'first_missed',
                last_run_at=now_func() - timedelta(minutes=2),
                total_run_count=10,
                schedule=app_schedule['first_missed']['schedule']),
            'second_missed': beat.ScheduleEntry(
                'second_missed', 'second_missed',
                last_run_at=now_func() - timedelta(minutes=2),
                total_run_count=10,
                schedule=app_schedule['second_missed']['schedule']),
            'non_missed': beat.ScheduleEntry(
                'non_missed', 'non_missed',
                last_run_at=now_func() - timedelta(minutes=2),
                total_run_count=10,
                schedule=app_schedule['non_missed']['schedule']),
        }

        self.app.conf.beat_schedule = app_schedule

        scheduler = scheduler_class(self.app)

        max_iter_number = 5
        for i in range(max_iter_number):
            delay = scheduler.tick()
            if delay > 0:
                break
        assert {'first_missed', 'second_missed'} == {
            item['task'] for item in scheduler.sent}
        # ensure next call on the beginning of next min
        assert abs(60 - cur_seconds - delay) < 1
Example #12
 def test_eq(self):
     self.assertEqual(crontab(day_of_week="1, 2"),
                      crontab(day_of_week="1-2"))
     self.assertEqual(crontab(minute="1", hour="2", day_of_week="5"),
                      crontab(minute="1", hour="2", day_of_week="5"))
     self.assertNotEqual(crontab(minute="1"), crontab(minute="2"))
     self.assertFalse(object() == crontab(minute="1"))
     self.assertFalse(crontab(minute="1") == object())
Example #13
    def test_crontab(self):
        d = self.crontab()

        result = self.loads(json.dumps(d))

        d.pop('__type__')
        self.assertEqual(result, crontab())
Example #14
File: tick.py Project: skrul/SnapUp
def tick():
    metrics = db.session.query(models.Metric).all()
    for metric in metrics:
        cron = schedules.crontab(
            minute=metric.cron_minute or '*',
            hour=metric.cron_hour or '*',
            day_of_week=metric.cron_dow or '*',
            day_of_month=metric.cron_dom or '*',
            month_of_year=metric.cron_moy or '*')

        last_run = metric.last_run
        if not last_run:
            last_run = datetime.datetime.fromtimestamp(0)

        is_due, next_time_to_run = cron.is_due(last_run)
        if is_due:
            fn = None
            if metric.source.source_type.name == 'google analytics':
                fn = ga.ga
            elif metric.source.source_type.name == 'database':
                fn = database.database
            else:
                raise Exception('bad source type')

            fn.delay(metric.id, datetime.datetime.utcnow(), None)
            metric.last_run = datetime.datetime.utcnow()
    db.session.commit()
Example #15
 def test_monthday_not_weekdaymonthyear(self):
     next = self.next_ocurrance(crontab(minute=[5, 42],
                                        day_of_week="mon",
                                        day_of_month=29,
                                        month_of_year="2-4"),
                                datetime(2010, 1, 29, 0, 5, 15))
     self.assertEqual(next, datetime(2010, 3, 29, 0, 5))
Example #16
 def test_default_crontab_spec(self):
     c = crontab()
     self.assertEqual(c.minute, set(range(60)))
     self.assertEqual(c.hour, set(range(24)))
     self.assertEqual(c.day_of_week, set(range(7)))
     self.assertEqual(c.day_of_month, set(range(1, 32)))
     self.assertEqual(c.month_of_year, set(range(1, 13)))
Example #17
 def test_not_weekmonthdayyear(self):
     next = self.next_ocurrance(crontab(minute=[5, 42],
                                        day_of_week='fri,sat',
                                        day_of_month=29,
                                        month_of_year='2-10'),
                                datetime(2010, 1, 28, 14, 30, 15))
     self.assertEqual(next, datetime(2010, 5, 29, 0, 5))
Example #18
def setup_periodic_tasks(sender, **kwargs):
    # Run ssdeep match analytic
    # Executes every morning at 2:00 a.m.
    sender.add_periodic_task(
        crontab(hour=2, minute=0),
        ssdeep_compare_celery.s(),
    )

    # Delete old metricbeat indices
    # Executes every morning at 3:00 a.m.
    metricbeat_enabled = es_storage_config.get('metricbeat_enabled', True)
    if metricbeat_enabled:
        sender.add_periodic_task(
            crontab(hour=3, minute=0),
            metricbeat_rollover.s(days=es_storage_config.get('metricbeat_rollover_days')),
        )
Example #19
 def test_weekday_monthday(self):
     next = self.next_ocurrance(crontab(minute=30,
                                        hour=14,
                                        day_of_week='mon',
                                        day_of_month=18),
                                datetime(2010, 1, 18, 14, 30, 15))
     self.assertEqual(next, datetime(2010, 10, 18, 14, 30))
Example #20
 def schedule(self):
     return schedules.crontab(
             minute=self.minute,
             hour=self.hour,
             day_of_week=self.day_of_week,
             day_of_month=self.day_of_month,
             month_of_year=self.month_of_year)
Example #21
 def test_scheduled_task(self):
     tq = TaskQueue('test')
     tq.bind_redis(self.conn1)
     kw = {
         'request': {'method': 'GET',
                     'url': 'http://httpbin.org'},
         'schedule': schedules.crontab('*/10', '1,2-10')
     }
     self.assertRaises(TaskCNameRequired, tq.add_task, **kw)
     kw['cname'] = 'crontest'
     task00 = tq.add_task(**kw)
     metakey = tq._TaskQueue__metakey(task00['id'])
     self.assertEqual(not_bytes(self.conn1.hget(metakey, 'schedule')),
                      '"*/10 1,2-10 * * *"')
     kw['schedule'] = schedules.schedule(30)
     kw['cname'] = 'schedtest'
     task01 = tq.add_task(**kw)
     metakey = tq._TaskQueue__metakey(task01['id'])
     self.assertEqual(not_bytes(self.conn1.hget(metakey, 'schedule')),
                      '"every 30.0 seconds"')
     task10 = tq.get_task_by_cname('crontest')
     self.assertEqual(task00, task10)
     task11 = tq.get_task_by_cname('schedtest')
     self.assertEqual(task01, task11)
     task = tq._get_task_by_cname('schedtest')
     task.dispatch()
     task21 = tq.get_task_by_cname('schedtest')
     self.assertNotEqual(task11, task21)
     now = utcnow()
     self.assertTrue(now - timedelta(5) < task21['last_run_at'] < now)
     task11.pop('last_run_at')
     task11.pop('uuid')
     task21.pop('last_run_at')
     task21.pop('uuid')
     self.assertEqual(task11, task21)
Example #22
    def __init__(self, slug, label, func, minute='*', hour='*', day_of_week='*', day_of_month='*', month_of_year='*'):
        self.slug = slug
        self.label = label
        self.func = func

        self.schedule = crontab(
            minute=minute, hour=hour, day_of_week=day_of_week,
            day_of_month=day_of_month, month_of_year=month_of_year,
        )

        app.conf.CELERYBEAT_SCHEDULE.update(
            {
                self.get_task_name(): {
                    'task': 'mayan_statistics.tasks.task_execute_statistic',
                    'schedule': self.schedule,
                    'args': (self.slug,)
                },
            }
        )

        app.conf.CELERY_ROUTES.update(
            {
                self.get_task_name(): {
                    'queue': 'statistics'
                },
            }
        )

        self.__class__._registry[slug] = self
Example #23
 def test_monthyear_not_weekmonthday(self):
     next = self.next_ocurrance(crontab(minute=[5, 42],
                                        day_of_week='mon',
                                        day_of_month=29,
                                        month_of_year='2-4'),
                                datetime(2010, 2, 28, 0, 5, 15))
     self.assertEqual(next, datetime(2010, 3, 29, 0, 5))
Example #24
    def setup_scheduler(self, app):
        self.app = app
        self.app.conf.beat_schedule = {}

        self.m1 = self.create_model_interval(
            schedule(timedelta(seconds=10)))
        self.m1.save()
        self.m1.refresh_from_db()

        self.m2 = self.create_model_interval(
            schedule(timedelta(minutes=20)))
        self.m2.save()
        self.m2.refresh_from_db()

        self.m3 = self.create_model_crontab(
            crontab(minute='2,4,5'))
        self.m3.save()
        self.m3.refresh_from_db()

        self.m4 = self.create_model_solar(
            solar('solar_noon', 48.06, 12.86))
        self.m4.save()
        self.m4.refresh_from_db()

        # disabled, should not be in schedule
        m5 = self.create_model_interval(
            schedule(timedelta(seconds=1)))
        m5.enabled = False
        m5.save()

        self.s = self.Scheduler(app=self.app)
Example #25
 def test_weekday_not_monthdayyear(self):
     next = self.next_ocurrance(crontab(minute=[5, 42],
                                        day_of_week='mon',
                                        day_of_month=18,
                                        month_of_year='2-10'),
                                datetime(2010, 1, 11, 0, 5, 15))
     self.assertEqual(next, datetime(2010, 10, 18, 0, 5))
Example #26
 def test_simple_crontab_spec(self):
     c = crontab(minute=30)
     self.assertEqual(c.minute, set([30]))
     self.assertEqual(c.hour, set(range(24)))
     self.assertEqual(c.day_of_week, set(range(7)))
     self.assertEqual(c.day_of_month, set(range(1, 32)))
     self.assertEqual(c.month_of_year, set(range(1, 13)))
Example #27
def includeme(config):
    # Register whatever file storage backend has been configured for storing
    # our package files.
    storage_class = config.maybe_dotted(
        config.registry.settings["files.backend"],
    )
    config.register_service_factory(storage_class.create_service, IFileStorage)

    # Register our service which will handle get the download statistics for
    # a project.
    config.register_service(
        RedisDownloadStatService(
            config.registry.settings["download_stats.url"],
        ),
        IDownloadStatService,
    )

    # Register our origin cache keys
    config.register_origin_cache_keys(
        Project,
        cache_keys=["project/{obj.normalized_name}"],
        purge_keys=["project/{obj.normalized_name}", "all-projects"],
    )
    config.register_origin_cache_keys(
        Release,
        cache_keys=["project/{obj.project.normalized_name}"],
        purge_keys=["project/{obj.project.normalized_name}", "all-projects"],
    )

    # Add a periodic task to compute trending once a day, assuming we have
    # been configured to be able to access BigQuery.
    if config.get_settings().get("warehouse.trending_table"):
        config.add_periodic_task(crontab(minute=0, hour=3), compute_trending)
Example #28
def includeme(config):
    # Register whatever file storage backend has been configured for storing
    # our package files.
    files_storage_class = config.maybe_dotted(config.registry.settings["files.backend"])
    config.register_service_factory(files_storage_class.create_service, IFileStorage)

    docs_storage_class = config.maybe_dotted(config.registry.settings["docs.backend"])
    config.register_service_factory(docs_storage_class.create_service, IDocsStorage)

    # Register our origin cache keys
    config.register_origin_cache_keys(
        File,
        cache_keys=["project/{obj.release.project.normalized_name}"],
        purge_keys=[key_factory("project/{obj.release.project.normalized_name}")],
    )
    config.register_origin_cache_keys(
        Project,
        cache_keys=["project/{obj.normalized_name}"],
        purge_keys=[
            key_factory("project/{obj.normalized_name}"),
            key_factory("user/{itr.username}", iterate_on="users"),
            key_factory("all-projects"),
        ],
    )
    config.register_origin_cache_keys(
        Release,
        cache_keys=["project/{obj.project.normalized_name}"],
        purge_keys=[
            key_factory("project/{obj.project.normalized_name}"),
            key_factory("user/{itr.username}", iterate_on="project.users"),
            key_factory("all-projects"),
        ],
    )
    config.register_origin_cache_keys(
        Role,
        purge_keys=[
            key_factory("user/{obj.user.username}"),
            key_factory("project/{obj.project.normalized_name}"),
        ],
    )
    config.register_origin_cache_keys(User, cache_keys=["user/{obj.username}"])
    config.register_origin_cache_keys(
        User.name,
        purge_keys=[
            key_factory("user/{obj.username}"),
            key_factory("project/{itr.normalized_name}", iterate_on="projects"),
        ],
    )
    config.register_origin_cache_keys(
        Email.primary,
        purge_keys=[
            key_factory("user/{obj.user.username}"),
            key_factory("project/{itr.normalized_name}", iterate_on="user.projects"),
        ],
    )

    # Add a periodic task to compute trending once a day, assuming we have
    # been configured to be able to access BigQuery.
    if config.get_settings().get("warehouse.trending_table"):
        config.add_periodic_task(crontab(minute=0, hour=3), compute_trending)
Example #29
 def test_monthdayyear_not_week(self):
     next = self.next_ocurrance(crontab(minute=[5, 42],
                                        day_of_week='wed,thu',
                                        day_of_month=29,
                                        month_of_year='1,4,7'),
                                datetime(2010, 1, 29, 14, 30, 15))
     self.assertEqual(next, datetime(2010, 4, 29, 0, 5))
Example #30
 def test_crontab_spec_invalid_dow(self):
     with self.assertRaises(ValueError):
         crontab(day_of_week='fooday-barday')
     with self.assertRaises(ValueError):
         crontab(day_of_week='1,4,foo')
     with self.assertRaises(ValueError):
         crontab(day_of_week='7')
     with self.assertRaises(ValueError):
         crontab(day_of_week='12')
Example #31
CELERY_IMPORTS = config['celery']['tasks']
BROKER_URL = config['heatmaps']['transport']+\
    "://"+brokerHostStr(config['heatmaps']['broker'])+\
    "/"+str(config['heatmaps']['broker']['database'])

CELERY_RESULT_BACKEND = config['heatmaps']['transport']
CELERY_MONGODB_BACKEND_SETTINGS = config['heatmaps']['broker']
CELERY_ACCEPT_CONTENT = ['pickle', 'json']

## periodic tasks
CELERYBEAT_SCHEDULE = {}
if config["heatmaps"]["enabled"]:
    CELERYBEAT_SCHEDULE['heat-queries'] = {
        'task': 'metrilyx.celerytasks.run_heat_queries',
        'schedule': crontab(minute='*/1'),
        #'args': (1,2),
        #'options': { 'task_id': '' }
        }
        
if config["cache"]["enabled"]:
    CELERYBEAT_SCHEDULE['metric-cacher'] = {
        'task': 'metrilyx.celerytasks.cache_metrics',
        'schedule': crontab(minute=str("*/%d" %(config['cache']['interval'])))
    }
    ## TODO: re-work logic
    #CELERYBEAT_SCHEDULE['metric-cache-expirer'] = {
    #    'task': 'metrilyx.celerytasks.expire_metrics_cache',
    #    'schedule': crontab(minute=str("*/%d" %(config['cache']['retention_period'])))
    #}
Example #32
if not TESTING:
    CELERY_BROKER_URL = os.getenv('REDIS_URL', f'redis://{redis_host}/1')
else:
    OPENWISP_RADIUS_GROUPCHECK_ADMIN = True
    OPENWISP_RADIUS_GROUPREPLY_ADMIN = True
    OPENWISP_RADIUS_USERGROUP_ADMIN = True
    CELERY_TASK_ALWAYS_EAGER = True
    CELERY_TASK_EAGER_PROPAGATES = True
    CELERY_BROKER_URL = 'memory://'

TEST_RUNNER = 'openwisp_utils.tests.TimeLoggingTestRunner'

CELERY_BEAT_SCHEDULE = {
    'deactivate_expired_users': {
        'task': 'openwisp_radius.tasks.cleanup_stale_radacct',
        'schedule': crontab(hour=0, minute=0),
        'args': None,
        'relative': True,
    },
    'delete_old_users': {
        'task': 'openwisp_radius.tasks.delete_old_users',
        'schedule': crontab(hour=0, minute=10),
        'args': [365],
        'relative': True,
    },
    'cleanup_stale_radacct': {
        'task': 'openwisp_radius.tasks.cleanup_stale_radacct',
        'schedule': crontab(hour=0, minute=20),
        'args': [365],
        'relative': True,
    },
Example #33
        },
    },
    'schedule-deletions': {
        'task': 'sentry.tasks.deletion.run_scheduled_deletions',
        'schedule': timedelta(minutes=15),
        'options': {
            'expires': 60 * 25,
        },
    },
    'schedule-weekly-organization-reports': {
        'task':
        'sentry.tasks.reports.prepare_reports',
        'schedule':
        crontab(
            minute=0,
            hour=12,  # 05:00 PDT, 09:00 EDT, 12:00 UTC
            day_of_week='monday',
        ),
        'options': {
            'expires': 60 * 60 * 3,
        },
    },
    'schedule-vsts-integration-subscription-check': {
        'task': 'sentry.tasks.integrations.kickoff_vsts_subscription_check',
        'schedule': timedelta(hours=6),
        'options': {
            'expires': 60 * 25,
        }
    }
}
Example #34
from celery.schedules import crontab
from celery.task import periodic_task
from celery.utils.log import get_task_logger

from dimagi.utils.dates import DateSpan

from corehq.apps.data_analytics.gir_generator import GIRTableGenerator
from corehq.apps.data_analytics.malt_generator import MALTTableGenerator
from corehq.util.log import send_HTML_email
from corehq.util.soft_assert import soft_assert

logger = get_task_logger(__name__)


@periodic_task(queue='background_queue',
               run_every=crontab(hour=1, minute=0, day_of_month='2'),
               acks_late=True,
               ignore_result=True)
def build_last_month_MALT():
    def _last_month_datespan():
        today = datetime.date.today()
        first_of_this_month = datetime.date(day=1,
                                            month=today.month,
                                            year=today.year)
        last_month = first_of_this_month - datetime.timedelta(days=1)
        return DateSpan.from_month(last_month.month, last_month.year)

    last_month = _last_month_datespan()
    generator = MALTTableGenerator([last_month])
    generator.build_table()
Example #35
# Create your tasks here
from __future__ import absolute_import, unicode_literals
from celery import shared_task

from celery.schedules import crontab
from celery.task import periodic_task

@periodic_task(run_every=crontab(hour=14, minute=25))
def every_day_midnight():
    print("This runs every day at 14:25")

Example #36
            for i in range(len(picture_soup)):
                data_photo_url = picture_soup[i]["href"]
                list_of_pictures.append(data_photo_url)
        except Exception as e:
            sys.stdout.write("Does not contain any images")
            sys.stdout.write(str(e))
        if len(list_of_pictures) == 0:
            list_of_pictures.append("no images")

        # sys.stdout.write("I ended to get data from the site, now i will put it to database")
        # Put data into db
        self.func_list.get(self.help_model_name, None)(id, characteristics, advert_title, url, phones_data, city, price,
                                                       text_data, list_of_pictures)


@periodic_task(run_every=crontab(hour=3, minute=00, day_of_week='tue,fri'))
def main():
    print("Print from Scrape main")
    ms = MainScrape()
    redis_shops = redis.Redis(host='redis', port=6379, db=0)
    rediska = redis.Redis(host='redis', port=6379, db=1)
    redis_list = [redis_shops, rediska]
    for rediska in redis_list:
        for link_id in rediska.keys():
            link = os.path.join('/a/show', link_id.decode("utf-8"))
            try:
                help_model_name = rediska.get(link_id.decode("utf-8"))
                print("Before first ERRRR...")
                if_exists = appconf.CORRESPOND_MODELS[help_model_name.decode("utf-8")].objects.filter \
                    (link_id=link_id.decode("utf-8")).exists()
            except Exception as e:
Example #37
        "ibutsu_server.tasks.db",
        "ibutsu_server.tasks.importers",
        "ibutsu_server.tasks.reports",
        "ibutsu_server.tasks.results",
        "ibutsu_server.tasks.runs",
    ],
)
app.config_from_object(settings)
app.Task = IbutsuTask
# Shortcut for the decorator
task = app.task
# Add in any periodic tasks
app.conf.beat_schedule = {
    "prune-old-artifact-files": {
        "task": "ibutsu_server.tasks.db.prune_old_files",
        "schedule": crontab(minute=0, hour=4,
                            day_of_week=6),  # 4 am on Saturday, after DB dump
        "args": (3, ),  # delete any artifact file older than 3 months
    }
}


@signals.task_failure.connect
def retry_task_on_exception(*args, **kwargs):
    """Retry a task automatically when it fails"""
    task = kwargs.get("sender")
    einfo = kwargs.get("einfo")
    logging.warning("Uncaught exception: %r for task %s", einfo, task)
    # Exponential backoff in seconds, capped at one hour.
    backoff = min(2**task.request.retries, 3600)
    task.retry(countdown=backoff)
Example #38
logger = get_task_logger(__name__)

bstamp = Bitstamp()
satang = Satang()
line = Line()
tg = Telegram()
fixer = Fixer()

# Check the rate spread every 5 minutes;
# update the fx rate every hour.
mckee_app.conf.beat_schedule = {
    'check-rate-spread': {
        'task': 'mckee_tasks.rate_spread',
        'schedule': crontab(minute='*/5', hour='*', day_of_week='*'),
    },
    'check-forex-rate': {
        'task': 'mckee_tasks.fx_rate',
        'schedule': crontab(minute='01', hour='*', day_of_week='*'),
    },
}


@mckee_app.task(name='mckee_tasks.fx_rate')
def fx_rate():
    """
    Keeps the global xe rate variable up to date for the calc_spread task to use
    Uses the fixer.io free-tier forex API, which only has EUR as the base pair, so we need to do some division

    :return: 31.019741608682857
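The snippet is cut off before the conversion itself. As a rough sketch of the EUR-based cross-rate division the docstring describes (the fixer.io response shape and the helper name below are assumptions, not taken from the original project):

import requests

def thb_per_usd(api_key):
    # fixer.io's free tier quotes every currency against EUR, so a USD->THB
    # rate is derived by dividing the two EUR-based quotes.
    resp = requests.get('http://data.fixer.io/api/latest',
                        params={'access_key': api_key, 'symbols': 'USD,THB'})
    rates = resp.json()['rates']
    return rates['THB'] / rates['USD']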
Example #39
    'backend': 'celery_once.backends.Redis',
    'settings': {
        'url': env('REDIS'),
        'default_timeout': 10 * 60,
        'blocking': CELERY_ALWAYS_EAGER,
    },
}

BROKER_URL = env('CELERY_BACKEND')

CELERY_ROUTES = {}

CELERYBEAT_SCHEDULE = {
    'notify_moderator_by_email': {
        'task': 'works.tasks.notify_moderator_by_email',
        'schedule': crontab(hour='*/1'),
    },
}

# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
Example #40
app = Celery('neksflis')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

app.conf.beat_schedule = {
    'deactivate-unsubscribed-users': {
        'task': 'neksflis.account.tasks.deactivate_unsubscribed_users',
        'schedule': crontab(minute=0, hour=0),
        'args': (),
    },
    'charge-subscriptions': {
        'task': 'neksflis.subscription.tasks.charge_subscriptions',
        'schedule': crontab(minute=0, hour='*/6'),
        'args': (),
    },
    'create-new-period-items': {
        'task': 'neksflis.subscription.tasks.create_new_period_items',
        'schedule': crontab(minute=0, hour='*/3'),
        'args': (),
    },
    'cancel-unpaid-subscriptions': {
        'task': 'neksflis.subscription.tasks.cancel_unpaid_subscriptions',
        'schedule': crontab(minute=0, hour='*/6'),
Example #41
CELERY_QUEUES = {
    'default': {
        "exchange": "default",
        "binding_key": "default",
    },
    'log_loader_queue': {
        'exchange': 'log_loader_queue',
        'routing_key': 'log_loader_queue',
    },
}

CELERYBEAT_SCHEDULE = {
    # crontab(hour=0, minute=0, day_of_week='saturday')
    'CH_get_stat': {  # example: 'file-backup'
        'task': 'api.tasks.task_log_loader_main',  # example: 'files.tasks.cleanup'
        'schedule': crontab(minute='*/3'),
        # 'args': (),
        'options': {'queue': 'log_loader_queue'},
    },
    'ad_stat_loader': {
        'task': 'api.tasks.task_adstat_loader',
        'schedule': crontab(minute='0', hour='2'),
        'options': {'queue': 'log_loader_queue'},
    },
    'CH_get_stat_test': {  # example: 'file-backup'
        'task': 'api.tasks_test.task_log_loader_main_test',  # example: 'files.tasks.cleanup'
        'schedule': crontab(minute='*/3'),
        # 'args': (),
        'options': {'queue': 'log_loader_queue'},
    },
    'ad_stat_loader_test': {
        'task': 'api.tasks_test.task_adstat_loader_test',
        'schedule': crontab(minute='0', hour='*/3'),
        'options': {'queue': 'log_loader_queue'},
    },
Example #42
    #     'ENGINE': 'django.db.backends.postgresql_psycopg2',
    #     'NAME': 'students',
    #     'USER': '******',
    #     'PASSWORD': '******',
    #     'HOST': 'localhost',
    #     'PORT': '5432',
    # },
}

CELERY_BROKER_URL = 'amqp://localhost'

from celery.schedules import crontab
CELERY_BEAT_SCHEDULE = {
    'beat': {
        'task': 'students.tasks.beat',
        'schedule': crontab(minute='*/1'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME':
        'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME':
        'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
Example #43
# API Documentation configuration
SWAGGER_SETTINGS = {
    'DEFAULT_AUTO_SCHEMA_CLASS': 'vng.utils.schema.CompoundTagsSchema',
}

# User registration settings
ACCOUNT_ACTIVATION_DAYS = 7
REGISTRATION_FORM = 'vng.utils.forms.RegistrationCaptcha'
SILENCED_SYSTEM_CHECKS = ['captcha.recaptcha_test_key_error']

RECAPTCHA_PUBLIC_KEY = '6LfwbaoUAAAAAJ7Bl5o-7pe9DKluPOLX-URNB821'
RECAPTCHA_PRIVATE_KEY = os.getenv('RECAPTCHA_PRIVATE_KEY', 'default')

CELERY_BEAT_SCHEDULE = {
    'task-number-one': {
        'task': 'vng.testsession.task.purge_sessions',
        'schedule': crontab(hour=0, minute=0),
    },
    'scheduled-test-provider': {
        'task': 'vng.servervalidation.task.execute_test_scheduled',
        'schedule': crontab(hour=0, minute=0),
    },
}

# Elastic APM
ELASTIC_APM = {
    'SERVICE_NAME': 'VNG API-Testplatform',
    'SECRET_TOKEN': os.getenv('ELASTIC_APM_SECRET_TOKEN', 'default'),
    'SERVER_URL': os.getenv('ELASTIC_APM_SERVER_URL', 'http://example.com'),
}
Example #44
import os
from celery import Celery
from celery.schedules import crontab
from datetime import timedelta

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
app = Celery('our_book')

app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))

app.conf.update(
    CELERY_TIMEZONE='Asia/Seoul',
    CELERYBEAT_SCHEDULE={
        'send_email_overdue_notification': {
            'task': 'accounts.tasks.send_email_overdue_notification',
            'schedule': crontab(minute=0, hour=11),
            'args': ()
        },
        'send_email_duedate_notification': {
            'task': 'accounts.tasks.send_email_return_date_notification',
            'schedule': crontab(minute=0, hour=15),
            'args': ()
        }
    }
)
Example #45
    },
    # NOTE: pipeline must run on the same worker node as `db`
    'pipeline.*': {
        'queue': 'pipeline'
    },
}
BASE_CONDA_PATH = pathlib.Path('/data/qiime2')
GITHUB_TOKEN = env('GITHUB_TOKEN', default='')
# Don't forget to update local.py when changing here
TASK_TIMES = {
    '03_MIN': 60 * 3,
    '05_MIN': 60 * 5,
    '10_MIN': 60 * 10,
    '90_MIN': 60 * 90,
    '02_HR': 60 * 60 * 2,
    '4A_CRON': crontab(minute=0, hour=4),  # daily at 4a
    'HRLY_CRON': crontab(minute=23),  # hourly
}


def generate_beat_schedule(TASK_TIMES):
    return {
        'periodic.clean_up_backend': {
            'task': 'db.celery_backend_cleanup',
            'schedule': TASK_TIMES['4A_CRON'],
        },
        'periodic.handle_prs': {
            'task': 'pipeline.handle_prs',
            'schedule': TASK_TIMES['HRLY_CRON'],
        },
        'periodic.reindex_conda_channels': {
Example #46
Periodic tasks also require starting the celery beat scheduler: python manage.py celerybeat --settings=settings
"""
import datetime

from celery import task
from celery.schedules import crontab
from celery.task import periodic_task

from common.log import logger
import smtplib
from email.mime.text import MIMEText
from email.header import Header


@periodic_task(run_every=crontab(minute='55',
                                 hour='13',
                                 day_of_month='31',
                                 month_of_year='3'))
def time_task():
    """
    Define a celery scheduled task
    """
    now = datetime.datetime.now()
    logger.error(u"celery scheduled task executed successfully, current time: {}".format(now))
    time_send_mail()


@periodic_task(run_every=crontab(minute='*/5', hour='*', day_of_week="*"))
def get_time():
    """
    Example of a celery periodic task
Example #47
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer
CELERY_RESULT_SERIALIZER = "json"
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-time-limit
# Set to whatever value is adequate in your circumstances
CELERY_TASK_TIME_LIMIT = env.int("CELERY_TASK_TIME_LIMIT",
                                 SECONDS_IN_MINUTE * 30)
# http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-soft-time-limit
# Set to whatever value is adequate in your circumstances
CELERY_TASK_SOFT_TIME_LIMIT = CELERY_TASK_TIME_LIMIT

CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler"

CELERY_BEAT_SCHEDULE = {
    "Validate long-term cache integrity every 15 minutes.": {
        "task": "sprints.sustainability.tasks.validate_worklog_cache",
        "schedule": crontab(minute='*/15'),
        "kwargs": {
            "long_term": True,
            "force_regenerate": False,
        },
    },
    "Recreate long-term cache once per week.": {
        "task": "sprints.sustainability.tasks.validate_worklog_cache",
        "schedule": crontab(
            minute=0,
            hour=0,
            day_of_week='sun',
        ),
        "kwargs": {
            "long_term": True,
            "force_regenerate": True,
Example #48
 'ingest:update': {
     'task': 'superdesk.io.update_ingest',
     # there is an internal schedule for updates per provider,
     # so this is the minimal interval at which an update can occur
     'schedule': timedelta(seconds=30),
     'options': {
         'expires': 29
     }
 },
 'ingest:gc': {
     'task': 'superdesk.io.gc_ingest',
     'schedule': timedelta(minutes=5),
 },
 'audit:gc': {
     'task': 'superdesk.audit.gc_audit',
     'schedule': crontab(minute='0', hour=local_to_utc_hour(1))
 },
 'session:gc': {
     'task': 'apps.auth.session_purge',
     'schedule': timedelta(minutes=5)
 },
 'content:gc': {
     'task': 'apps.archive.content_expiry',
     'schedule': crontab(minute='*/30')
 },
 'temp_files:gc': {
     'task': 'superdesk.commands.temp_file_expiry',
     'schedule': crontab(minute='0', hour=local_to_utc_hour(3))
 },
 'content_api:gc': {
     'task': 'content_api.commands.item_expiry',
Example #49
class CommunityBaseSettings(Settings):

    """Community base settings, don't use this directly."""

    # Django settings
    SITE_ID = 1
    ROOT_URLCONF = 'readthedocs.urls'
    SUBDOMAIN_URLCONF = 'readthedocs.core.urls.subdomain'
    SINGLE_VERSION_URLCONF = 'readthedocs.core.urls.single_version'
    LOGIN_REDIRECT_URL = '/dashboard/'
    FORCE_WWW = False
    SECRET_KEY = 'replace-this-please'  # noqa
    ATOMIC_REQUESTS = True

    # Debug settings
    DEBUG = True

    # Domains and URLs
    PRODUCTION_DOMAIN = 'readthedocs.org'
    PUBLIC_DOMAIN = None
    PUBLIC_DOMAIN_USES_HTTPS = False
    USE_SUBDOMAIN = False
    PUBLIC_API_URL = 'https://{}'.format(PRODUCTION_DOMAIN)
    # Some endpoints from the API can be proxied on other domain
    # or use the same domain where the docs are being served
    # (omit the host if that's the case).
    RTD_PROXIED_API_URL = PUBLIC_API_URL
    RTD_EXTERNAL_VERSION_DOMAIN = 'external-builds.readthedocs.io'

    # Doc Builder Backends
    MKDOCS_BACKEND = 'readthedocs.doc_builder.backends.mkdocs'
    SPHINX_BACKEND = 'readthedocs.doc_builder.backends.sphinx'

    # slumber settings
    SLUMBER_API_HOST = 'https://readthedocs.org'
    SLUMBER_USERNAME = None
    SLUMBER_PASSWORD = None

    # Email
    DEFAULT_FROM_EMAIL = '*****@*****.**'
    SERVER_EMAIL = DEFAULT_FROM_EMAIL
    SUPPORT_EMAIL = None

    # Sessions
    SESSION_COOKIE_DOMAIN = 'readthedocs.org'
    SESSION_COOKIE_HTTPONLY = True
    SESSION_COOKIE_AGE = 30 * 24 * 60 * 60  # 30 days
    SESSION_SAVE_EVERY_REQUEST = True
    # This cookie is used in cross-origin API requests from *.readthedocs.io to readthedocs.org
    SESSION_COOKIE_SAMESITE = None

    # CSRF
    CSRF_COOKIE_HTTPONLY = True
    CSRF_COOKIE_AGE = 30 * 24 * 60 * 60

    # Security & X-Frame-Options Middleware
    # https://docs.djangoproject.com/en/1.11/ref/middleware/#django.middleware.security.SecurityMiddleware
    SECURE_BROWSER_XSS_FILTER = True
    SECURE_CONTENT_TYPE_NOSNIFF = True
    X_FRAME_OPTIONS = 'DENY'

    # Content Security Policy
    # https://django-csp.readthedocs.io/
    CSP_BLOCK_ALL_MIXED_CONTENT = True
    CSP_DEFAULT_SRC = None  # This could be improved
    CSP_FRAME_ANCESTORS = ("'none'",)
    CSP_OBJECT_SRC = ("'none'",)
    CSP_REPORT_URI = None
    CSP_REPORT_ONLY = True  # Set to false to enable CSP in blocking mode
    CSP_EXCLUDE_URL_PREFIXES = (
        "/admin/",
    )

    # Read the Docs
    READ_THE_DOCS_EXTENSIONS = ext
    RTD_LATEST = 'latest'
    RTD_LATEST_VERBOSE_NAME = 'latest'
    RTD_STABLE = 'stable'
    RTD_STABLE_VERBOSE_NAME = 'stable'
    RTD_CLEAN_AFTER_BUILD = False

    # Database and API hitting settings
    DONT_HIT_API = False
    DONT_HIT_DB = True

    SYNC_USER = getpass.getuser()

    USER_MATURITY_DAYS = 7

    # override classes
    CLASS_OVERRIDES = {}

    DOC_PATH_PREFIX = '_/'

    # Application classes
    @property
    def INSTALLED_APPS(self):  # noqa
        apps = [
            'django.contrib.auth',
            'django.contrib.admin',
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.sites',
            'django.contrib.staticfiles',
            'django.contrib.messages',
            'django.contrib.humanize',

            # third party apps
            'dj_pagination',
            'taggit',
            'django_gravatar',
            'rest_framework',
            'rest_framework.authtoken',
            'corsheaders',
            'textclassifier',
            'annoying',
            'django_extensions',
            'crispy_forms',
            'messages_extends',
            'django_elasticsearch_dsl',
            'django_filters',
            'polymorphic',

            # our apps
            'readthedocs.projects',
            'readthedocs.builds',
            'readthedocs.core',
            'readthedocs.doc_builder',
            'readthedocs.oauth',
            'readthedocs.redirects',
            'readthedocs.rtd_tests',
            'readthedocs.api.v2',
            'readthedocs.api.v3',

            'readthedocs.gold',
            'readthedocs.payments',
            'readthedocs.notifications',
            'readthedocs.integrations',
            'readthedocs.analytics',
            'readthedocs.sphinx_domains',
            'readthedocs.search',


            # allauth
            'allauth',
            'allauth.account',
            'allauth.socialaccount',
            'allauth.socialaccount.providers.github',
            'allauth.socialaccount.providers.gitlab',
            'allauth.socialaccount.providers.bitbucket',
            'allauth.socialaccount.providers.bitbucket_oauth2',
        ]
        if ext:
            apps.append('django_countries')
            apps.append('readthedocsext.donate')
            apps.append('readthedocsext.embed')
            apps.append('readthedocsext.spamfighting')
        return apps

    @property
    def USE_PROMOS(self):  # noqa
        return 'readthedocsext.donate' in self.INSTALLED_APPS

    MIDDLEWARE = (
        'readthedocs.core.middleware.ReadTheDocsSessionMiddleware',
        'django.middleware.locale.LocaleMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.middleware.security.SecurityMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.middleware.clickjacking.XFrameOptionsMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'dj_pagination.middleware.PaginationMiddleware',
        'readthedocs.core.middleware.SubdomainMiddleware',
        'readthedocs.core.middleware.SingleVersionMiddleware',
        'corsheaders.middleware.CorsMiddleware',
        'csp.middleware.CSPMiddleware',
    )

    AUTHENTICATION_BACKENDS = (
        # Needed to login by username in Django admin, regardless of `allauth`
        'django.contrib.auth.backends.ModelBackend',
        # `allauth` specific authentication methods, such as login by e-mail
        'allauth.account.auth_backends.AuthenticationBackend',
    )

    AUTH_PASSWORD_VALIDATORS = [
        {
            'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
            'OPTIONS': {
                'min_length': 9,
            }
        },
        {
            'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
        },
        {
            'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
        },
    ]

    MESSAGE_STORAGE = 'readthedocs.notifications.storages.FallbackUniqueStorage'

    NOTIFICATION_BACKENDS = [
        'readthedocs.notifications.backends.EmailBackend',
        'readthedocs.notifications.backends.SiteBackend',
    ]

    # Paths
    SITE_ROOT = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    TEMPLATE_ROOT = os.path.join(SITE_ROOT, 'readthedocs', 'templates')
    DOCROOT = os.path.join(SITE_ROOT, 'user_builds')
    UPLOAD_ROOT = os.path.join(SITE_ROOT, 'user_uploads')
    CNAME_ROOT = os.path.join(SITE_ROOT, 'cnames')
    LOGS_ROOT = os.path.join(SITE_ROOT, 'logs')
    PRODUCTION_ROOT = os.path.join(SITE_ROOT, 'prod_artifacts')
    PRODUCTION_MEDIA_ARTIFACTS = os.path.join(PRODUCTION_ROOT, 'media')

    # Assets and media
    STATIC_ROOT = os.path.join(SITE_ROOT, 'static')
    STATIC_URL = '/static/'
    MEDIA_ROOT = os.path.join(SITE_ROOT, 'media/')
    MEDIA_URL = '/media/'
    ADMIN_MEDIA_PREFIX = '/media/admin/'
    STATICFILES_DIRS = [
        os.path.join(SITE_ROOT, 'readthedocs', 'static'),
        os.path.join(SITE_ROOT, 'media'),
    ]
    STATICFILES_FINDERS = [
        'readthedocs.core.static.SelectiveFileSystemFinder',
        'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    ]
    PYTHON_MEDIA = False

    # Django Storage subclass used to write build artifacts to cloud or local storage
    # https://docs.readthedocs.io/page/development/settings.html#rtd-build-media-storage
    RTD_BUILD_MEDIA_STORAGE = 'readthedocs.builds.storage.BuildMediaFileSystemStorage'

    TEMPLATES = [
        {
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'DIRS': [TEMPLATE_ROOT],
            'OPTIONS': {
                'debug': DEBUG,
                'context_processors': [
                    'django.contrib.auth.context_processors.auth',
                    'django.contrib.messages.context_processors.messages',
                    'django.template.context_processors.debug',
                    'django.template.context_processors.i18n',
                    'django.template.context_processors.media',
                    'django.template.context_processors.request',
                    # Read the Docs processor
                    'readthedocs.core.context_processors.readthedocs_processor',
                ],
                'loaders': [
                    'django.template.loaders.filesystem.Loader',
                    'django.template.loaders.app_directories.Loader',
                ],
            },
        },
    ]

    # Cache
    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'PREFIX': 'docs',
        }
    }
    CACHE_MIDDLEWARE_SECONDS = 60

    # I18n
    TIME_ZONE = 'UTC'
    USE_TZ = True
    LANGUAGE_CODE = 'en-us'
    LANGUAGES = (
        ('ca', gettext('Catalan')),
        ('en', gettext('English')),
        ('es', gettext('Spanish')),
        ('pt-br', gettext('Brazilian Portuguese')),
        ('nb', gettext('Norwegian Bokmål')),
        ('fr', gettext('French')),
        ('ru', gettext('Russian')),
        ('de', gettext('German')),
        ('gl', gettext('Galician')),
        ('vi', gettext('Vietnamese')),
        ('zh-cn', gettext('Simplified Chinese')),
        ('zh-tw', gettext('Traditional Chinese')),
        ('ja', gettext('Japanese')),
        ('uk', gettext('Ukrainian')),
        ('it', gettext('Italian')),
        ('ko', gettext('Korean')),
    )
    LOCALE_PATHS = [
        os.path.join(SITE_ROOT, 'readthedocs', 'locale'),
    ]
    USE_I18N = True
    USE_L10N = True

    # Celery
    CELERY_APP_NAME = 'readthedocs'
    CELERY_ALWAYS_EAGER = True
    CELERYD_TASK_TIME_LIMIT = 60 * 60  # 60 minutes
    CELERY_SEND_TASK_ERROR_EMAILS = False
    CELERYD_HIJACK_ROOT_LOGGER = False
    # This stops us from pre-fetching a task that then sits around on the builder
    CELERY_ACKS_LATE = True
    # Don't queue a bunch of tasks in the workers
    CELERYD_PREFETCH_MULTIPLIER = 1
    CELERY_CREATE_MISSING_QUEUES = True

    CELERY_DEFAULT_QUEUE = 'celery'
    CELERYBEAT_SCHEDULE = {
        # Runs every hour at minute 30
        'hourly-remove-orphan-symlinks': {
            'task': 'readthedocs.projects.tasks.broadcast_remove_orphan_symlinks',
            'schedule': crontab(minute=30),
            'options': {'queue': 'web'},
        },
        'quarter-finish-inactive-builds': {
            'task': 'readthedocs.projects.tasks.finish_inactive_builds',
            'schedule': crontab(minute='*/15'),
            'options': {'queue': 'web'},
        },
        'every-three-hour-clear-persistent-messages': {
            'task': 'readthedocs.core.tasks.clear_persistent_messages',
            'schedule': crontab(minute=0, hour='*/3'),
            'options': {'queue': 'web'},
        },
        'every-day-delete-old-search-queries': {
            'task': 'readthedocs.search.tasks.delete_old_search_queries_from_db',
            'schedule': crontab(minute=0, hour=0),
            'options': {'queue': 'web'},
        }
    }
    MULTIPLE_APP_SERVERS = [CELERY_DEFAULT_QUEUE]
    MULTIPLE_BUILD_SERVERS = [CELERY_DEFAULT_QUEUE]

    # Sentry
    SENTRY_CELERY_IGNORE_EXPECTED = True

    # Docker
    DOCKER_ENABLE = False
    DOCKER_SOCKET = 'unix:///var/run/docker.sock'
    # This setting has been deprecated in favor of DOCKER_IMAGE_SETTINGS
    DOCKER_BUILD_IMAGES = None
    DOCKER_LIMITS = {'memory': '200m', 'time': 600}

    # User used to create the container.
    # In production we use the same user as the one defined by the
    # ``USER docs`` instruction inside the Dockerfile.
    # In development, we can use the "UID:GID" of the current user running the
    # instance to avoid file permissions issues.
    # https://docs.docker.com/engine/reference/run/#user
    RTD_DOCKER_USER = '******'
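    # Illustrative sketch of the development case described above (an
    # assumption, not part of these settings): the "UID:GID" string can be
    # built from the current user, e.g.
    #   '{}:{}'.format(os.getuid(), os.getgid())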

    RTD_DOCKER_COMPOSE = False

    DOCKER_DEFAULT_IMAGE = 'readthedocs/build'
    DOCKER_VERSION = 'auto'
    DOCKER_DEFAULT_VERSION = 'latest'
    DOCKER_IMAGE = '{}:{}'.format(DOCKER_DEFAULT_IMAGE, DOCKER_DEFAULT_VERSION)
    DOCKER_IMAGE_SETTINGS = {
        # A large number of users still have this pinned in their config file.
        # We must have documented it at some point.
        'readthedocs/build:2.0': {
            'python': {
                'supported_versions': [2, 2.7, 3, 3.5],
                'default_version': {
                    2: 2.7,
                    3: 3.5,
                },
            },
        },
        'readthedocs/build:4.0': {
            'python': {
                'supported_versions': [2, 2.7, 3, 3.5, 3.6, 3.7],
                'default_version': {
                    2: 2.7,
                    3: 3.7,
                },
            },
        },
        'readthedocs/build:5.0': {
            'python': {
                'supported_versions': [2, 2.7, 3, 3.5, 3.6, 3.7, 'pypy3.5'],
                'default_version': {
                    2: 2.7,
                    3: 3.7,
                },
            },
        },
        'readthedocs/build:6.0': {
            'python': {
                'supported_versions': [2, 2.7, 3, 3.5, 3.6, 3.7, 3.8, 'pypy3.5'],
                'default_version': {
                    2: 2.7,
                    3: 3.7,
                },
            },
        },
        'readthedocs/build:7.0': {
            'python': {
                'supported_versions': [2, 2.7, 3, 3.5, 3.6, 3.7, 3.8, 'pypy3.5'],
                'default_version': {
                    2: 2.7,
                    3: 3.7,
                },
            },
        },
    }

    # Alias tagged via ``docker tag`` on the build servers
    DOCKER_IMAGE_SETTINGS.update({
        'readthedocs/build:stable': DOCKER_IMAGE_SETTINGS.get('readthedocs/build:5.0'),
        'readthedocs/build:latest': DOCKER_IMAGE_SETTINGS.get('readthedocs/build:6.0'),
        'readthedocs/build:testing': DOCKER_IMAGE_SETTINGS.get('readthedocs/build:7.0'),
    })
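
    # For reference, such an alias would be created on a build server with
    # something like the following (illustrative, not part of this file):
    #   docker tag readthedocs/build:6.0 readthedocs/build:latest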

    # All auth
    ACCOUNT_ADAPTER = 'readthedocs.core.adapters.AccountAdapter'
    ACCOUNT_EMAIL_REQUIRED = True
    ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
    ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
    ACCOUNT_ACTIVATION_DAYS = 7
    SOCIALACCOUNT_AUTO_SIGNUP = False
    SOCIALACCOUNT_PROVIDERS = {
        'github': {
            'SCOPE': [
                'user:email',
                'read:org',
                'admin:repo_hook',
                'repo:status',
            ],
        },
        'gitlab': {
            'SCOPE': [
                'api',
                'read_user',
            ],
        },
    }

    # CORS
    CORS_ORIGIN_REGEX_WHITELIST = (
        r'^http://(.+)\.readthedocs\.io$',
        r'^https://(.+)\.readthedocs\.io$',
    )
    # So people can post to their accounts
    CORS_ALLOW_CREDENTIALS = True
    CORS_ALLOW_HEADERS = (
        'x-requested-with',
        'content-type',
        'accept',
        'origin',
        'authorization',
        'x-csrftoken'
    )

    # RTD Settings
    REPO_LOCK_SECONDS = 30
    ALLOW_PRIVATE_REPOS = False
    DEFAULT_PRIVACY_LEVEL = 'public'
    DEFAULT_VERSION_PRIVACY_LEVEL = 'public'
    GROK_API_HOST = 'https://api.grokthedocs.com'
    SERVE_DOCS = ['public']
    ALLOW_ADMIN = True

    # Elasticsearch settings.
    ES_HOSTS = ['search:9200']
    ELASTICSEARCH_DSL = {
        'default': {
            'hosts': 'search:9200'
        },
    }
    # Chunk size for elasticsearch reindex celery tasks
    ES_TASK_CHUNK_SIZE = 100

    # Info from Honza about this:
    # The key factor in determining the shard count is usually not the node count,
    # but the size of your data.
    # There are advantages to just having a single shard in an index since
    # you don't have to do the distribute/collect steps when executing a search.
    # If your data will allow it (not significantly larger than 40GB)
    # I would recommend going to a single shard and one replica meaning
    # any of the two nodes will be able to serve any search without talking to the other one.
    # Scaling to more searches will then just mean adding a third node
    # and a second replica, resulting in an immediate 50% bump in max search throughput.

    ES_INDEXES = {
        'project': {
            'name': 'project_index',
            'settings': {
                'number_of_shards': 1,
                'number_of_replicas': 1,
            }
        },
        'page': {
            'name': 'page_index',
            'settings': {
                'number_of_shards': 1,
                'number_of_replicas': 1,
            }
        },
    }

    # ANALYZER = 'analysis': {
    #     'analyzer': {
    #         'default_icu': {
    #             'type': 'custom',
    #             'tokenizer': 'icu_tokenizer',
    #             'filter': ['word_delimiter', 'icu_folding', 'icu_normalizer'],
    #         }
    #     }
    # }

    # Disable auto refresh for increasing index performance
    ELASTICSEARCH_DSL_AUTO_REFRESH = False

    ALLOWED_HOSTS = ['*']

    ABSOLUTE_URL_OVERRIDES = {
        'auth.user': lambda o: '/profiles/{}/'.format(o.username)
    }

    INTERNAL_IPS = ('127.0.0.1',)

    # Taggit
    # https://django-taggit.readthedocs.io
    TAGGIT_TAGS_FROM_STRING = 'readthedocs.projects.tag_utils.rtd_parse_tags'

    # Stripe
    STRIPE_SECRET = None
    STRIPE_PUBLISHABLE = None

    # Do Not Track support
    DO_NOT_TRACK_ENABLED = False

    # Misc application settings
    GLOBAL_ANALYTICS_CODE = None
    DASHBOARD_ANALYTICS_CODE = None  # For the dashboard, not docs
    GRAVATAR_DEFAULT_IMAGE = 'https://assets.readthedocs.org/static/images/silhouette.png'  # NOQA
    OAUTH_AVATAR_USER_DEFAULT_URL = GRAVATAR_DEFAULT_IMAGE
    OAUTH_AVATAR_ORG_DEFAULT_URL = GRAVATAR_DEFAULT_IMAGE
    RESTRICTEDSESSIONS_AUTHED_ONLY = True
    RESTRUCTUREDTEXT_FILTER_SETTINGS = {
        'cloak_email_addresses': True,
        'file_insertion_enabled': False,
        'raw_enabled': False,
        'strip_comments': True,
        'doctitle_xform': True,
        'sectsubtitle_xform': True,
        'initial_header_level': 2,
        'report_level': 5,
        'syntax_highlight': 'none',
        'math_output': 'latex',
        'field_name_limit': 50,
    }
    REST_FRAMEWORK = {
        'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
        'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',  # NOQA
        'DEFAULT_THROTTLE_RATES': {
            'anon': '5/minute',
            'user': '******',
        },
        'PAGE_SIZE': 10,
        'TEST_REQUEST_DEFAULT_FORMAT': 'json',
    }

    SILENCED_SYSTEM_CHECKS = ['fields.W342']

    # Logging
    LOG_FORMAT = '%(name)s:%(lineno)s[%(process)d]: %(levelname)s %(message)s'
    LOGGING = {
        'version': 1,
        'disable_existing_loggers': True,
        'formatters': {
            'default': {
                'format': LOG_FORMAT,
                'datefmt': '%d/%b/%Y %H:%M:%S',
            },
        },
        'handlers': {
            'console': {
                'level': 'INFO',
                'class': 'logging.StreamHandler',
                'formatter': 'default'
            },
            'debug': {
                'level': 'DEBUG',
                'class': 'logging.handlers.RotatingFileHandler',
                'filename': os.path.join(LOGS_ROOT, 'debug.log'),
                'formatter': 'default',
            },
            'null': {
                'class': 'logging.NullHandler',
            },
        },
        'loggers': {
            '': {  # root logger
                'handlers': ['debug', 'console'],
                # Always send from the root, handlers can filter levels
                'level': 'DEBUG',
            },
            'readthedocs': {
                'handlers': ['debug', 'console'],
                'level': 'DEBUG',
                # Don't double log at the root logger for these.
                'propagate': False,
            },
            'django.security.DisallowedHost': {
                'handlers': ['null'],
                'propagate': False,
            },
        },
    }
Exemplo n.º 50
0
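# Imports implied by this snippet (illustrative; ``async_task`` lives in some
# task module not shown here, and the concrete ``logger`` setup is assumed).
# The celery.task import is the pre-5.0 style matching the @periodic_task
# decorator used below.
import datetime
import logging

from celery.schedules import crontab
from celery.task import periodic_task

logger = logging.getLogger(__name__)
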
def execute_task():
    """
    Run a celery task asynchronously.

    Ways to call a celery task:
        task.delay(arg1, arg2, kwarg1='x', kwarg2='y')
        task.apply_async(args=[arg1, arg2], kwargs={'kwarg1': 'x', 'kwarg2': 'y'})
        delay(): shortcut that works like calling a plain function
        apply_async(): required when setting extra execution options such as a
                       scheduled time (eta); see
                       http://celery.readthedocs.org/en/latest/userguide/calling.html
    """
    now = datetime.datetime.now()
    logger.error(u"celery 定时任务启动,将在60s后执行,当前时间:{}".format(now))
    # 调用定时任务
    async_task.apply_async(args=[now.hour, now.minute],
                           eta=now + datetime.timedelta(seconds=60))
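
# Illustrative comparison of the two calling styles described in the docstring
# above (``async_task`` and its argument list are taken from this example):
#   async_task.delay(now.hour, now.minute)              # runs as soon as a worker picks it up
#   async_task.apply_async(args=[now.hour, now.minute],
#                          eta=now + datetime.timedelta(seconds=60))  # runs ~60s later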


@periodic_task(run_every=crontab(minute='*/5', hour='*', day_of_week="*"))
def get_time():
    """
    Example of a celery periodic task.

    run_every=crontab(minute='*/5', hour='*', day_of_week="*"): run the task every 5 minutes
    periodic_task: the periodic task is triggered automatically while the program is running
    """
    execute_task()
    now = datetime.datetime.now()
    logger.error(u"celery 周期任务调用成功,当前时间:{}".format(now))
Exemplo n.º 51
0
#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Anthony Verez [email protected]

from celery.schedules import crontab
import time
import logging

ALERTS = {
    'bro_intel.AlertBroIntel': crontab(minute='*/1'),
    'bro_notice.AlertBroNotice': crontab(minute='*/1'),
    'bruteforce_ssh.AlertBruteforceSsh': crontab(minute='*/1'),
    'cloudtrail.AlertCloudtrail': crontab(minute='*/1'),
    'fail2ban.AlertFail2ban': crontab(minute='*/1'),
}

RABBITMQ = {
    'mqserver': 'localhost',
    'mquser': '******',
    'mqpassword': '******',
    'mqport': 5672,
    'alertexchange': 'alerts',
    'alertqueue': 'mozdef.alert'
}
Exemplo n.º 52
0
            )
        else:
            logger.error(
                'Send fax error, will retry: %s',
                exc,
                exc_info=sys.exc_info(),
            )
            send_fax.retry(
                countdown=300,
                args=[comm_id, subject, body, error_count],
                kwargs=kwargs,
                exc=exc,
            )


@periodic_task(run_every=crontab(hour=5, minute=0),
               time_limit=10 * 60,
               soft_time_limit=570,
               name='muckrock.foia.tasks.followup_requests')
def followup_requests():
    """Follow up on any requests that need following up on"""
    log = []
    # weekday returns 5 for sat and 6 for sun
    is_weekday = date.today().weekday() < 5
    if (config.ENABLE_FOLLOWUP
            and (config.ENABLE_WEEKEND_FOLLOWUP or is_weekday)):
        try:
            num_requests = FOIARequest.objects.get_followup().count()
            for foia in FOIARequest.objects.get_followup():
                try:
                    foia.followup()
Exemplo n.º 53
0
 def schedule(self):
     return schedules.crontab(minute=self.minute,
                              hour=self.hour,
                              day_of_week=self.day_of_week,
                              day_of_month=self.day_of_month,
                              month_of_year=self.month_of_year)
Exemplo n.º 54
0
class UipaOrgThemeBase(ThemeBase):
    FROIDE_THEME = 'uipa_org.theme'

    SITE_NAME = "UIPA.org"
    SITE_EMAIL = "*****@*****.**"
    SITE_URL = 'http://localhost:8000'

    PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
    STATIC_ROOT = os.path.abspath(os.path.join(PROJECT_ROOT, "..", "public"))

    FIXTURE_DIRS = ('fixtures',)

    MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'

    SECRET_KEY = os_env('SECRET_KEY')

    MEDIA_ROOT = os_env('MEDIA_ROOT')

    DATA_UPLOAD_MAX_MEMORY_SIZE = 26214400  # 25MB

    TAGGING_AUTOCOMPLETE_MAX_TAGS = 100

    @property
    def INSTALLED_APPS(self):
        installed = super(UipaOrgThemeBase, self).INSTALLED_APPS
        installed += [
            'celery_haystack',
            'djcelery_email',
            'django.contrib.redirects',
            'uipa_org.uipa_constants',
            'uipa_org.theme.templatetags.uipa_extras',
            'tinymce',
            'raven.contrib.django.raven_compat'
        ]
        return installed

    MIDDLEWARE_CLASSES = [
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.middleware.common.CommonMiddleware',
        'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
        'django.contrib.redirects.middleware.RedirectFallbackMiddleware',
        'froide.account.middleware.AcceptNewTermsMiddleware',
    ]

    TINYMCE_DEFAULT_CONFIG = {
        'plugins': "table,spellchecker,paste,searchreplace",
        'theme': "advanced",
        'cleanup_on_startup': False
    }

    SECRET_URLS = values.DictValue({
        "admin": "uipa-admin",
        "postmark_inbound": "uipa_postmark_inbound",
        "postmark_bounce": "uipa_postmark_bounce"
    })

    HAYSTACK_CONNECTIONS = {
        'default': {
            'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
            'URL': 'http://127.0.0.1:9200/',
            'INDEX_NAME': 'haystack',
        }
    }

    TIME_ZONE = values.Value('Pacific/Honolulu')

    CELERY_IMPORTS = ('uipa_org.tasks',)
    CELERY_TIMEZONE = values.Value('Pacific/Honolulu')

    CELERYBEAT_SCHEDULE = {
        'fetch-mail': {
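            # crontab() with no arguments fires every minute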
            'task': 'froide.foirequest.tasks.fetch_mail',
            'schedule': crontab(),
        },
        'detect-asleep': {
            'task': 'froide.foirequest.tasks.detect_asleep',
            'schedule': crontab(hour=0, minute=0),
        },
        'detect-overdue': {
            'task': 'froide.foirequest.tasks.detect_overdue',
            'schedule': crontab(hour=0, minute=0),
        },
        'update-foirequestfollowers': {
            'task': 'froide.foirequestfollower.tasks.batch_update',
            'schedule': crontab(hour=0, minute=0),
        },
        'classification-reminder': {
            'task': 'froide.foirequest.tasks.classification_reminder',
            'schedule': crontab(hour=7, minute=0, day_of_week=6),
        },
        'uipa-private_public_reminder': {
            'task': 'uipa_org.tasks.private_public_reminder',
            'schedule': crontab(hour=0, minute=0),
        },
        'uipa-make_public_private': {
            'task': 'uipa_org.tasks.make_private_public',
            'schedule': crontab(hour=0, minute=0),
        },
        'uipa-deferred_message_notification': {
            'task': 'uipa_org.tasks.deferred_message_notification',
            'schedule': crontab(hour=6, minute=0),
        },
    }

    CELERY_RESULT_BACKEND = 'rpc'
    CELERY_RESULT_PERSISTENT = True

    @property
    def FROIDE_CONFIG(self):
        config = super(UipaOrgThemeBase, self).FROIDE_CONFIG
        config.update(dict(
            currency="Dollars",
            create_new_publicbody=False,
            publicbody_empty=False,
            user_can_hide_web=True,
            public_body_officials_public=True,
            public_body_officials_email_public=False,
            request_public_after_due_days=14,
            payment_possible=False,
            default_law=1,
            greetings=[rec(u"Aloha (?:Mr\.?|Ms\.? .*?)")],
            closings=[rec(u"Mahalo,?")],
            public_body_boosts={},
            dryrun=True,
            dryrun_domain="beta.uipa.org",
            allow_pseudonym=False,
            # doc_conversion_binary=None,  # replace with libreoffice instance
            doc_conversion_binary="/Applications/LibreOffice.app/Contents/MacOS/soffice",
            doc_conversion_call_func=None,  # see settings_test for use
            api_activated=True,
            search_engine_query='http://www.google.com/search?as_q=%(query)s&as_epq=&as_oq=&as_eq=&hl=en&lr=&cr=&as_ft=i&as_filetype=&as_qdr=all&as_occt=any&as_dt=i&as_sitesearch=%(domain)s&as_rights=&safe=images',
            show_public_body_employee_name=False,
            make_public_num_days_after_due_date=365,
            ga_tracking_id=os_env('GA_TRACKING_ID'),
        ))
        return config
Exemplo n.º 55
0
def configure_celery(flask_app, celery, test_config=None):
    database_url = shared_config["db"]["url"]
    engine_args_literal = ast.literal_eval(
        shared_config["db"]["engine_args_literal"])
    redis_url = shared_config["redis"]["url"]

    if test_config is not None:
        if "db" in test_config:
            if "url" in test_config["db"]:
                database_url = test_config["db"]["url"]

    # Update celery configuration
    celery.conf.update(
        imports=[
            "src.tasks.index", "src.tasks.index_blacklist",
            "src.tasks.index_cache", "src.tasks.index_plays",
            "src.tasks.index_metrics"
        ],
        beat_schedule={
            "update_discovery_provider": {
                "task": "update_discovery_provider",
                "schedule": timedelta(seconds=5),
            },
            "update_ipld_blacklist": {
                "task": "update_ipld_blacklist",
                "schedule": timedelta(seconds=60),
            },
            "update_cache": {
                "task": "update_discovery_cache",
                "schedule": timedelta(seconds=60)
            },
            "update_play_count": {
                "task": "update_play_count",
                "schedule": timedelta(seconds=5)
            },
            "update_metrics": {
                "task": "update_metrics",
                "schedule": crontab(minute=0, hour="*")
            }
        },
        task_serializer="json",
        accept_content=["json"],
        broker_url=redis_url,
    )

    # Initialize DB object for celery task context
    db = SessionManager(database_url, engine_args_literal)
    logger.info('Database instance initialized!')

    # Initialize IPFS client for celery task context
    gateway_addrs = shared_config["ipfs"]["gateway_hosts"].split(',')
    gateway_addrs.append(
        shared_config["discprov"]["user_metadata_service_url"])
    logger.warning(f"__init__.py | {gateway_addrs}")
    ipfs_client = IPFSClient(shared_config["ipfs"]["host"],
                             shared_config["ipfs"]["port"], gateway_addrs)

    # Initialize Redis connection
    redis_inst = redis.Redis.from_url(url=redis_url)

    # Clear existing lock if present
    redis_inst.delete("disc_prov_lock")
    logger.info('Redis instance initialized!')

    # Initialize custom task context with database object
    class DatabaseTask(Task):
        def __init__(self, *args, **kwargs):
            self._db = db
            self._web3_provider = web3
            self._abi_values = abi_values
            self._shared_config = shared_config
            self._ipfs_client = ipfs_client
            self._redis = redis_inst

        @property
        def abi_values(self):
            return self._abi_values

        @property
        def web3(self):
            return self._web3_provider

        @property
        def db(self):
            return self._db

        @property
        def shared_config(self):
            return self._shared_config

        @property
        def ipfs_client(self):
            return self._ipfs_client

        @property
        def redis(self):
            return self._redis

    celery.autodiscover_tasks(["src.tasks"], "index", True)

    # Subclassing celery task with discovery provider context
    # Provided through properties defined in 'DatabaseTask'
    celery.Task = DatabaseTask

    celery.finalize()
Exemplo n.º 56
0
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from celery.schedules import crontab

from indico.core.celery import celery
from indico.core.db import db

from indico_livesync.models.agents import LiveSyncAgent
from indico_livesync.util import clean_old_entries


@celery.periodic_task(run_every=crontab(minute='*/15'), plugin='livesync')
def scheduled_update():
    from indico_livesync.plugin import LiveSyncPlugin
    clean_old_entries()
    for agent in LiveSyncAgent.find_all():
        if agent.backend is None:
            LiveSyncPlugin.logger.warning(
                'Skipping agent %s; backend not found', agent.name)
            continue
        if not agent.initial_data_exported:
            LiveSyncPlugin.logger.warning(
                'Skipping agent %s; initial export not performed yet',
                agent.name)
            continue
        LiveSyncPlugin.logger.info('Running agent %s', agent.name)
        agent.create_backend().run()
Exemplo n.º 57
0
CELERY_BEAT_SCHEDULE = {
    "delete-empty-allocations": {
        "task": "saleor.warehouse.tasks.delete_empty_allocations_task",
        "schedule": timedelta(days=1),
    },
    "deactivate-preorder-for-variants": {
        "task": "saleor.product.tasks.deactivate_preorder_for_variants_task",
        "schedule": timedelta(hours=1),
    },
    "delete-expired-reservations": {
        "task": "saleor.warehouse.tasks.delete_expired_reservations_task",
        "schedule": timedelta(days=1),
    },
    "delete-expired-checkouts": {
        "task": "saleor.checkout.tasks.delete_expired_checkouts",
        "schedule": crontab(hour=0, minute=0),
    },
    "delete-outdated-event-data": {
        "task": "saleor.core.tasks.delete_event_payloads_task",
        "schedule": timedelta(days=1),
    },
    "deactivate-expired-gift-cards": {
        "task": "saleor.giftcard.tasks.deactivate_expired_cards_task",
        "schedule": crontab(hour=0, minute=0),
    },
    "update-stocks-quantity-allocated": {
        "task": "saleor.warehouse.tasks.update_stocks_quantity_allocated_task",
        "schedule": crontab(hour=0, minute=0),
    },
    "delete-old-export-files": {
        "task": "saleor.csv.tasks.delete_old_export_files",
Exemplo n.º 58
0
def register_periodical_tasks(sender, **kwargs) -> None:
    sender.add_periodic_task(
        crontab(hour='*/24', minute=0),
        update_database.s(),
    )
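    # Note: hour='*/24' only ever matches hour 0, so this schedule is
    # equivalent to crontab(hour=0, minute=0), i.e. once a day at midnight.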
Exemplo n.º 59
0
from celery.task import periodic_task, task
from celery.utils.log import get_task_logger
from corehq.apps.domain.calculations import CALC_FNS, _all_domain_stats
from corehq.apps.hqadmin.escheck import check_cluster_health, check_case_index, CLUSTER_HEALTH, check_xform_index, check_exchange_index
from corehq.apps.reports.models import (ReportNotification,
                                        UnsupportedScheduledReportError,
                                        HQGroupExportConfiguration)
from corehq.elastic import get_es
from corehq.pillows.mappings.domain_mapping import DOMAIN_INDEX
from couchexport.groupexports import export_for_group
from dimagi.utils.logging import notify_exception

logging = get_task_logger(__name__)


@periodic_task(run_every=crontab(hour=[8, 14], minute="0", day_of_week="*"))
def check_es_index():
    """
    Verify that the Case and soon to be added XForm Elastic indices are up to date with what's in couch

    This code is also called in the HQ admin page as well
    """

    es_status = {}
    es_status.update(check_cluster_health())
    es_status.update(check_case_index())
    es_status.update(check_xform_index())
    es_status.update(check_exchange_index())

    do_notify = False
    message = []
Exemplo n.º 60
0
from __future__ import absolute_import, unicode_literals
from celery import Celery
from celery.schedules import crontab

app = Celery('Pipeline',
             broker='amqp://',
             backend='amqp://',
             include=['Pipeline.tasks'])

app.conf.timezone = 'US/Eastern'

app.conf.beat_schedule = {
    # Execute every two minutes
    'run-every-2-minute': {
        'task': 'tasks.run',
        'schedule': crontab(minute='*/2')
    }
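    # NB: a trailing comma is needed after the entry above if any of the
    # commented-out entries below are re-enabled.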

    # Executes every day at 8:30 a.m.
    # 'run-every-day-morning': {
    #     'task': 'tasks.run',
    #     # 'schedule': crontab(hour=8, minute=30),
    # },

    # 'add-every-10-seconds': {
    #     'task': 'tasks.add',
    #     'schedule': 10.0,
    #     'args': (16, 16)
    # },

    # 'add-every-1-minute': {