Example #1
File: test_task.py  Project: clayg/celery
 def test_crontab_spec_hour_formats(self):
     c = crontab(hour=6)
     self.assertEquals(c.hour, set([6]))
     c = crontab(hour='5')
     self.assertEquals(c.hour, set([5]))
     c = crontab(hour=(4, 8, 12))
     self.assertEquals(c.hour, set([4, 8, 12]))
Example #2
File: test_task.py  Project: jokar/minion
 def test_crontab_spec_hour_formats(self):
     c = crontab(hour=6)
     self.assertEquals(c.hour, set([6]))
     c = crontab(hour='5')
     self.assertEquals(c.hour, set([5]))
     c = crontab(hour=(4, 8, 12))
     self.assertEquals(c.hour, set([4, 8, 12]))
Example #3
File: test_task.py  Project: clayg/celery
 def test_crontab_spec_minute_formats(self):
     c = crontab(minute=30)
     self.assertEquals(c.minute, set([30]))
     c = crontab(minute='30')
     self.assertEquals(c.minute, set([30]))
     c = crontab(minute=(30, 40, 50))
     self.assertEquals(c.minute, set([30, 40, 50]))
     c = crontab(minute=set([30, 40, 50]))
     self.assertEquals(c.minute, set([30, 40, 50]))
Example #4
File: test_task.py  Project: jokar/minion
 def test_crontab_spec_minute_formats(self):
     c = crontab(minute=30)
     self.assertEquals(c.minute, set([30]))
     c = crontab(minute='30')
     self.assertEquals(c.minute, set([30]))
     c = crontab(minute=(30, 40, 50))
     self.assertEquals(c.minute, set([30, 40, 50]))
     c = crontab(minute=set([30, 40, 50]))
     self.assertEquals(c.minute, set([30, 40, 50]))
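
The four tests above (Examples #1 to #4) all assert the same behavior: whatever form the minute/hour spec takes, crontab normalizes it to a set of integers. A minimal standalone sketch of that normalization, assuming a recent Celery where crontab is imported from celery.schedules (older releases, as in several examples below, use celery.task.schedules):

from celery.schedules import crontab

# Single int, numeric string, tuple and set all normalize to a set of ints.
assert crontab(hour=6).hour == {6}
assert crontab(hour='5').hour == {5}
assert crontab(hour=(4, 8, 12)).hour == {4, 8, 12}
assert crontab(minute={30, 40, 50}).minute == {30, 40, 50}
# Step patterns expand to every matching value.
assert crontab(minute='*/15').minute == {0, 15, 30, 45}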
Example #5
class UpdateJobs():
    """
    Task that update API data
    All jobs run periodically at midnight
    """
    @staticmethod
    @periodic_task(run_every=(crontab(minute=0, hour=0)),
                   name="update_news_data",
                   ignore_result=True)
    def update_news_data():
        """ Update news data """
        api_translator = APITranslator("news", 1)
        response = api_translator.build_api_request()
        models = api_translator.response_to_model(response)

        for model in models:
            model.save()

    @staticmethod
    @periodic_task(run_every=(crontab(minute=0, hour=0)),
                   name="update_pollution_data",
                   ignore_result=True)
    def update_pollution_data():
        """ Update pollution data """
        api_translator = APITranslator("pollution", 1)
        response = api_translator.build_api_request()
        models = api_translator.response_to_model(response)

        for model in models:
            model.save()

    @staticmethod
    @periodic_task(run_every=(crontab(minute=0, hour=0)),
                   name="update_bus_data",
                   ignore_result=True)
    def update_bus_data():
        """ Update bus data """
        api_translator = APITranslator("bus", 1)
        response = api_translator.build_api_request()
        models = api_translator.response_to_model(response)

        for model in models:
            model.save()

    @staticmethod
    @periodic_task(run_every=(crontab(minute=0, hour=0)),
                   name="update_bikes_data",
                   ignore_result=True)
    def update_bikes_data():
        """ Update bikes data """
        api_translator = APITranslator("bikes", 1)
        response = api_translator.build_api_request()
        models = api_translator.response_to_model(response)

        for model in models:
            model.save()
Example #6
    def add_adminconfig(self):
        topic_ex = Exchange("admin_topic", type="topic", auto_delete=True, durable=True)

        self.CELERYBEAT_SCHEDULE.update(
            {
                "admin_consumer": {
                    "task": "app.admin_manager.adminConsumer",
                    "schedule": timedelta(seconds=2),
                    "args": ("admin_" + self.CB_CLUSTER_TAG,),
                },
                "xdcr_consumer": {
                    "task": "app.admin_manager.xdcrConsumer",
                    "schedule": timedelta(seconds=2),
                    "args": ("xdcr_" + self.CB_CLUSTER_TAG,),
                },
                "do_backup": {  # every once per day
                    "task": "app.admin_manager.backup_task",
                    "schedule": crontab(minute=0, hour=0),  # Execute daily at midnight.
                    "args": [cfg.ENABLE_BACKUPS],
                },
            }
        )

        self.CELERY_QUEUES = self.CELERY_QUEUES + (
            # schedulable queue for multiple tasks
            self.make_queue("admin_tasks", "admin_tasks.#", topic_ex),
        )

        self.CELERY_ROUTES = self.CELERY_ROUTES + (  # route schedulable tasks to the same internal task queue
            {"app.admin_manager.adminConsumer": self.route_args("admin_tasks", "admin_tasks.adminconsumer")},
            {"app.admin_manager.xdcrConsumer": self.route_args("admin_tasks", "admin_tasks.xdcrconsumer")},
            {"app.admin_manager.backup_task": self.route_args("admin_tasks", "admin_tasks.backuptasks")},
            {"app.rest_client_tasks.perform_admin_tasks": self.route_args("admin_tasks", "admin_tasks.performadmin")},
            {"app.rest_client_tasks.perform_xdcr_tasks": self.route_args("admin_tasks", "admin_tasks.performxdcr")},
        )
Example #7
class ProcessRefTypes(PeriodicTask):
    """
    Reloads the refTypeID to name mappings. Done daily at 00:00 just before history is processed.
    """

    run_every = crontab(hour=0, minute=0)

    def run(self, **kwargs):

        logger.debug('Updating refTypeIDs...')

        api = eveapi.EVEAPIConnection()
        ref_types = api.eve.RefTypes()

        for ref_type in ref_types.refTypes:
            # Try to find mapping in DB. If found -> update. If not found -> create
            try:
                type_object = RefType.objects.get(id=ref_type.refTypeID)
                type_object.name = ref_type.refTypeName
                type_object.save()

            except RefType.DoesNotExist:
                type_object = RefType(id=ref_type.refTypeID, name=ref_type.refTypeName)
                type_object.save()

        logger.info('Imported %d refTypeIDs from API.' % len(ref_types.refTypes))
Example #8
def task_queue_creator(time):
    for t in time:
        hour, minute = get_hour_and_minute(t)

        @periodic_task(run_every=crontab(hour=hour,
                                         minute=minute,
                                         day_of_week="*"))
        def every_day_backup_task_queue(time):
            pass
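
Example #8 depends on a get_hour_and_minute helper that is not shown, so the sketch below is only a plausible reconstruction assuming the time entries are "HH:MM" strings. Because each loop iteration decorates a function with the same name, passing a distinct name= to periodic_task may be needed so the registrations do not overwrite one another.

# Hypothetical helper (not part of the original example): parse an "HH:MM" string.
def get_hour_and_minute(t):
    hour, minute = t.split(':')
    return int(hour), int(minute)

# Registers one daily backup task per configured time of day.
task_queue_creator(["01:30", "13:30"])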
Example #9
class SearchIndexUpdatePeriodicTask(PeriodicTask):
    routing_key = 'periodic.search.update_index'
    run_every = crontab(hour=4, minute=0)

    def run(self, **kwargs):
        logger = self.get_logger(**kwargs)
        logger.info("Starting update index")
        # Run the update_index management command
        update_index.Command().handle()
        logger.info("Finishing update index")
Example #10
class ProcessHistory(PeriodicTask):
    """
    Post-process history table
    """

    # execute at midnight +1 minute UTC
    run_every = crontab(hour=0, minute=1)

    #run_every = datetime.timedelta(minutes=2)

    def run(self, **kwargs):
        regions = History.objects.order_by('mapregion__id').distinct(
            'mapregion')
        for region in regions.iterator():
            ProcessRegionHistory.delay(region.mapregion)

        logger.warning("Scheduled %d history updates." % len(regions))
Example #11
File: test_task.py  Project: jokar/minion
 def test_crontab_spec_dow_formats(self):
     c = crontab(day_of_week=5)
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week='5')
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week='fri')
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week='tuesday,sunday,fri')
     self.assertEquals(c.day_of_week, set([0, 2, 5]))
     c = crontab(day_of_week='mon-fri')
     self.assertEquals(c.day_of_week, set([1, 2, 3, 4, 5]))
     c = crontab(day_of_week='*/2')
     self.assertEquals(c.day_of_week, set([0, 2, 4, 6]))
Example #12
File: test_task.py  Project: clayg/celery
 def test_crontab_spec_dow_formats(self):
     c = crontab(day_of_week=5)
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week='5')
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week='fri')
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week='tuesday,sunday,fri')
     self.assertEquals(c.day_of_week, set([0, 2, 5]))
     c = crontab(day_of_week='mon-fri')
     self.assertEquals(c.day_of_week, set([1, 2, 3, 4, 5]))
     c = crontab(day_of_week='*/2')
     self.assertEquals(c.day_of_week, set([0, 2, 4, 6]))
Example #13
    def add_adminconfig(self):
        topic_ex = Exchange("admin_topic",
                            type="topic",
                            auto_delete=True,
                            durable=True)

        self.CELERYBEAT_SCHEDULE.update({
            'admin_consumer': {
                'task': 'app.admin_manager.adminConsumer',
                'schedule': timedelta(seconds=2),
                'args': ('admin_' + self.CB_CLUSTER_TAG, )
            },
            'xdcr_consumer': {
                'task': 'app.admin_manager.xdcrConsumer',
                'schedule': timedelta(seconds=2),
                'args': ('xdcr_' + self.CB_CLUSTER_TAG, )
            },
            'do_backup': {  # every once per day
                'task': 'app.admin_manager.backup_task',
                'schedule': crontab(minute=0,
                                    hour=0),  #Execute daily at midnight.
                'args': [cfg.ENABLE_BACKUPS]
            },
        })

        self.CELERY_QUEUES = self.CELERY_QUEUES +\
            (
                # schedulable queue for multiple tasks
                self.make_queue('admin_tasks',  'admin_tasks.#', topic_ex),
            )

        self.CELERY_ROUTES = self.CELERY_ROUTES +\
        (   # route schedulable tasks to the same internal task queue
            {'app.admin_manager.adminConsumer':
                self.route_args('admin_tasks','admin_tasks.adminconsumer')},
            {'app.admin_manager.xdcrConsumer':
                self.route_args('admin_tasks','admin_tasks.xdcrconsumer')},
            {'app.admin_manager.backup_task':
                self.route_args('admin_tasks','admin_tasks.backuptasks')},
            {'app.rest_client_tasks.perform_admin_tasks':
                self.route_args('admin_tasks','admin_tasks.performadmin')},
            {'app.rest_client_tasks.perform_xdcr_tasks':
                self.route_args('admin_tasks','admin_tasks.performxdcr')},
        )
Example #14
File: test_task.py  Project: berg/celery
 def test_crontab_spec_dow_formats(self):
     c = crontab(day_of_week=5)
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week="5")
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week="fri")
     self.assertEquals(c.day_of_week, set([5]))
     c = crontab(day_of_week="tuesday,sunday,fri")
     self.assertEquals(c.day_of_week, set([0, 2, 5]))
     c = crontab(day_of_week="mon-fri")
     self.assertEquals(c.day_of_week, set([1, 2, 3, 4, 5]))
     c = crontab(day_of_week="*/2")
     self.assertEquals(c.day_of_week, set([0, 2, 4, 6]))
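
Examples #11, #12 and #14 show that day_of_week accepts numbers, names, comma-separated lists, ranges and step patterns, all normalized to a set of weekday numbers with Sunday = 0. A small hypothetical beat schedule using those forms (the task names are placeholders, not taken from any project above):

from celery.schedules import crontab

CELERYBEAT_SCHEDULE = {
    'weekday-cleanup': {
        'task': 'app.tasks.cleanup',    # hypothetical task name
        'schedule': crontab(hour=2, minute=30, day_of_week='mon-fri'),
    },
    'every-other-day-report': {
        'task': 'app.tasks.report',     # hypothetical task name
        'schedule': crontab(hour=3, minute=0, day_of_week='*/2'),  # sun, tue, thu, sat
    },
}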
Example #15
def setup_schedule_callbacks(sender, callback):
    callbacks = settings.GOLEM_CONFIG.get('SCHEDULE_CALLBACKS')
    if not callbacks:
        return

    for name in callbacks:
        params = callbacks[name]
        print('Scheduling task {}: {}'.format(name, params))
        if isinstance(params, dict):
            cron = crontab(**params)
        elif isinstance(params, int):
            cron = params
        else:
            raise Exception('Specify either number of seconds or dict of celery crontab params (hour, minute): {}'.format(params))
        sender.add_periodic_task(   
            cron,
            callback.s(name),
        )
        print(' Scheduled for {}'.format(cron))
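
The SCHEDULE_CALLBACKS setting read by Example #15 maps each callback name either to a dict of crontab keyword arguments or to a plain number of seconds. A hypothetical configuration in that shape (names and values are illustrative only):

GOLEM_CONFIG = {
    'SCHEDULE_CALLBACKS': {
        'morning_digest': {'hour': 8, 'minute': 0},   # becomes crontab(hour=8, minute=0)
        'health_check': 300,                          # plain interval: every 300 seconds
    },
}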
Example #16
def add_data_with_wcr():
    result = []
    for page in range(12):
        req = requests.get("https://wildfiretoday.com/recent-fires/page/" + str(page) + "/")

        sp = soup(req.content, 'html5lib')

        data_name = sp.find_all('div', attrs={'class': "entry-content"})
        if data_name:
            for d in data_name:
                paragraphs = d.find_all('p')
                temp = ""
                for ld in paragraphs:
                    temp += ld.getText()
                result.append(temp)
    fire_inf = fire_info.objects.all()
    data1=set()
    for d in fire_inf:
        data1.add(d.city+","+d.country)
    for d in data1:
        data = d.split(",")
        for res in result:
            if data[0] in res:
                nlp = spacy.load("en_core_web_sm")
                matcher = Matcher(nlp.vocab)
                pattern = [{"POS": "VERB"},{"POS": "ADV","op": "?"}, {"POS": "NUM"}, {"POS": "NOUN"}]
                matcher.add("Damage", None, pattern)
                doc1 = nlp(res)
                matches = matcher(doc1)
                for match_id, start, end in matches:
                    string_id = nlp.vocab.strings[match_id]  # Get string representation
                    span = doc1[start:end]  # The matched span
                    if span.text!="":
                        obj = fire_info.objects.get(city=data[0])
                        obj.details = span.text
                        obj.save()


@periodic_task(run_every=(crontab(minute='*/15')),name="update_db",ignore_result=True)
def update_db():
    logger.info("db started")
    #add_csv_data_to_db()
    add_area_prediction()
Example #17
    def add_adminconfig(self):
        topic_ex  = Exchange("admin_topic", type="topic", auto_delete = True, durable = True)

        self.CELERYBEAT_SCHEDULE.update(
        {
            'admin_consumer': {
                'task': 'app.admin_manager.adminConsumer',
                'schedule': timedelta(seconds=2),
                'args' : ('admin_'+self.CB_CLUSTER_TAG,)
            },
             'xdcr_consumer': {
                 'task': 'app.admin_manager.xdcrConsumer',
                 'schedule': timedelta(seconds=2),
                'args' : ('xdcr_'+self.CB_CLUSTER_TAG,)
             },
            'do_backup': { # every once per day
                'task': 'app.admin_manager.backup_task',
                'schedule': crontab(minute=0, hour=0), #Execute daily at midnight.
                'args': [cfg.ENABLE_BACKUPS]
            },
        })

        self.CELERY_QUEUES = self.CELERY_QUEUES +\
            (
                # schedulable queue for multiple tasks
                self.make_queue('admin_tasks',  'admin_tasks.#', topic_ex),
            )

        self.CELERY_ROUTES = self.CELERY_ROUTES +\
        (   # route schedulable tasks to the same internal task queue
            {'app.admin_manager.adminConsumer':
                self.route_args('admin_tasks','admin_tasks.adminconsumer')},
            {'app.admin_manager.xdcrConsumer':
                self.route_args('admin_tasks','admin_tasks.xdcrconsumer')},
            {'app.admin_manager.backup_task':
                self.route_args('admin_tasks','admin_tasks.backuptasks')},
            {'app.rest_client_tasks.perform_admin_tasks':
                self.route_args('admin_tasks','admin_tasks.performadmin')},
            {'app.rest_client_tasks.perform_xdcr_tasks':
                self.route_args('admin_tasks','admin_tasks.performxdcr')},
        )
Example #18
from celery.task.schedules import crontab
from django.contrib.auth.models import User
from celery.decorators import periodic_task
from rest_framework.authtoken.models import Token
from rest_framework import status
import requests
from .models import Menu, MenuItems
from datetime import datetime
from ..utils.slack_app import createMenuBlock, getChannelMembers, sendMenuMessage
'''
  Execute this task every day at 9 AM
'''


@periodic_task(run_every=(crontab(minute=0, hour=9)),
               name="getFoodServiceChannelMembers",
               ignore_result=True)
def getFoodServiceChannelMembers():
    members = getChannelMembers()
    # Use slackId as username and create User object, we're using is_staff -> False to check if is employee or admin
    for userId in members:
        User.objects.get_or_create(username=userId, is_staff=False)
    return True


'''
  Execute this task every day at 10 AM
'''


@periodic_task(run_every=(crontab(minute=0, hour=10)),
Example #19
File: tasks.py  Project: ixof/allianceauth
            logger.info(
                "User %s main character id %s missing model. Clearing main character."
                % (user, auth.main_char_id))
            auth.main_char_id = ''
            auth.save()
            notify(
                user,
                "Main Character Reset",
                message=
                "Your specified main character no longer has a model.\nThis could be the result of "
                "an invalid API.\nYour main character ID has been reset.",
                level="warn")
    set_state(user)


@periodic_task(run_every=crontab(minute=0, hour="*/3"))
def run_api_refresh():
    if not EveApiManager.check_if_api_server_online():
        logger.warn(
            "Aborted scheduled API key refresh: API server unreachable")
        return

    for u in User.objects.all():
        refresh_user_apis.delay(u)


@task
def update_corp(id, is_blue=None):
    EveManager.update_corporation(id, is_blue=is_blue)

Example #20
File: tasks.py  Project: mkcode/lernanta
from celery.task.schedules import crontab
from celery.decorators import periodic_task

from celery.task import Task

from models import update_metrics_cache
from projects.models import get_active_projects

#TODO celery.decorators module is being deprecated
@periodic_task(name="tracker.tasks.update_metrics", run_every=crontab(hour=4, minute=30, day_of_week="*"))
def update_metrics():
    # This runs every morning at 4:30a.m
    log = update_metrics.get_logger()
    log.debug('updating project pageview metrics')
    for project in get_active_projects():
        UpdateCourseMetrics.apply_async((project,))


class UpdateCourseMetrics(Task):
    """ Update metrics relevant to a specific project."""
    name = 'notifications.tasks.UpdateCourseMetrics'
    
    def run(self, project, **kwargs):
        log = self.get_logger(**kwargs)
        log.debug('updating pageview metrics for {0}'.format(project.name))
        update_metrics_cache(project)
Example #21
from reminder.tasks import send_email

logger = get_task_logger(__name__)


def check_usd_rate():
    response = requests.get(urljoin(settings.NBP_API_URL, "usd/"))
    return response.json()["rates"][0]["mid"]


def check_gbp_rate():
    response = requests.get(urljoin(settings.NBP_API_URL, "gbp/"))
    return response.json()["rates"][0]["mid"]


@periodic_task(run_every=crontab(minute="0", hour="3"))
def check_rates():
    usd_rate = check_usd_rate()
    gbp_rate = check_gbp_rate()
    if usd_rate < float(settings.USD_THRESHOLD) or gbp_rate < float(
            settings.GBP_THRESHOLD):
        text = f"""
        USD: {usd_rate}
        GBP: {gbp_rate}
        """
        context = {"name": "USD | GBP rates", "text": text, "deadline": ""}

        message = get_template(
            template_name="watchers/email/rates_message.txt")
        html_message = get_template(
            template_name="watchers/email/rates_message.html")
Example #22
from celery.task.schedules import crontab
from celery.decorators import periodic_task

from celery.task import Task

from models import update_metrics_cache
#from projects.models import get_active_projects

#TODO celery.decorators module is being deprecated
@periodic_task(name="tracker.tasks.update_metrics", run_every=crontab(hour=4, minute=30, day_of_week="*"))
def update_metrics():
    # This runs every morning at 4:30a.m
    #log = update_metrics.get_logger()
    #log.debug('updating project pageview metrics')
    #for project in get_active_projects():
    #    UpdateCourseMetrics.apply_async((project,))
    pass


class UpdateCourseMetrics(Task):
    """ Update metrics relevant to a specific project."""
    name = 'notifications.tasks.UpdateCourseMetrics'
    
    def run(self, project, **kwargs):
        log = self.get_logger(**kwargs)
        log.debug('updating pageview metrics for {0}'.format(project.name))
        update_metrics_cache(project)
Example #23
from django.conf import settings
from django.utils import timezone
from django.contrib.auth import get_user_model

from celery.task.schedules import crontab
from celery.decorators import periodic_task
from celery.utils.log import get_task_logger

from opensteer.meetings.models import Standup, Checkin

User = get_user_model()
logger = get_task_logger(__name__)


@periodic_task(
    run_every=(crontab(hour=settings.MEETING_HOUR,
                       minute=settings.MEETING_MINUTE)),
    name="create_meetings",
    ignore_result=True,
)
def create_meetings():
    """Create meetings if required"""
    now = timezone.now()
    day, date, week = now.weekday(), now.date(), int(now.strftime("%U"))
    logger.info('Attempting to create meetings')
    Standup.create_meetings(date=date)
    if day == settings.CHECKIN_DAY:
        Checkin.create_meetings(year=now.year, week=week)
Example #24
                content = "You have new messages"
                user.has_messages = False
                user.save()
            else:
                content = "You have no new messages"
            msg = render_to_string(
                'warcraft/periodic_template.html', {
                    'date': datetime.now(),
                    'often': minutes,
                    'name': user.firstName,
                    'message': content
                })
            send_mail('Periodic Email',
                      'nothing',
                      '*****@*****.**', [user.email],
                      fail_silently=False,
                      html_message=msg)


@periodic_task(run_every=(crontab(minute='*/10')),
               name="send_something_1",
               ignore_result=True)
def send_something_1():
    sending_mail(10)


@periodic_task(run_every=(crontab(minute='*/60')),
               name="send_something_2",
               ignore_result=True)
def send_something_2():
    sending_mail(60)
Example #25
        html_doc = fetchurl(s)
        if html_doc:
            result = parsedoc(html_doc)
        for a in range(len(result)):
            style=result[a].find('div')['style']
            urls = re.findall('url\((.*?)\)', style)
            y=result[a].find_all("a",class_="clickable")
            desc = result[a].find_all("div",class_="news-card-content news-right-box")        
            try:
                description = str(desc[0].div.text)
                title = str(y[0].span.text)
                image = str(urls[0]).strip("'")
                try:
                    a = NewsWebsite(title = title, url = image, description= description, interest = i)
                    a.save()
                except IntegrityError as e:
                    pass
                print title
                print image
                print "----------------------------------------------------------------------------"
                print description
            except UnicodeEncodeError as e:
                continue
                # print unicodedata.normalize('NFKD', (desc[0].div.text).encode('ascii', 'ignore'))
            print "++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
        i = i+1
        
@periodic_task(run_every=(crontab(minute='*/30')), name="some_task", ignore_result=True)
def some_task():
    main()
Example #26
File: tasks.py  Project: kenoseni/tweets
import os
from celery.decorators import task, periodic_task
from celery.task.schedules import crontab
from celery.utils.log import get_task_logger
from .models import Tip
from .python_tips import get_tweets, save_tweets_to_db, tweet_generator

# task logger
logger = get_task_logger(__name__)
name = os.getenv('TWEET_NAME', '')


# This task is set to run every 5 minutes
@periodic_task(run_every=(crontab(minute='*/5')), name=name)
def get_tweet():
    """Function that runs periodically to get tweets and save
    in database
    """
    logger.info('----------Getting tweets--------')
    tweets = get_tweets('python_tip')
    save_tweets_to_db(Tip, tweet_generator(tweets))
    logger.info('----------Done, tweets saved in database---------')
Example #27
from __future__ import absolute_import, division, print_function

from celery.decorators import periodic_task, task
from celery.task.schedules import crontab
from celery.utils.log import get_task_logger
from django.core import management
from django.conf import settings

from timetable.school_mappers import SCHOOLS_MAP
from parsing.schools.active import ACTIVE_SCHOOLS

logger = get_task_logger(__name__)


@periodic_task(run_every=(crontab(hour=00, minute=00)),
               name="task_parse_current_registration_period",
               ignore_result=True)
def task_parse_current_registration_period(schools=None, textbooks=False):
    """Parse semesters in current registration period."""
    schools = set(schools or ACTIVE_SCHOOLS)
    for school in set(SCHOOLS_MAP) & schools:
        # Grab the most recent year.
        years = [SCHOOLS_MAP[school].active_semesters.items()[-1]]

        # Handle case where registration is for full academic year
        if SCHOOLS_MAP[school].full_academic_year_registration:
            if len(SCHOOLS_MAP[school].active_semesters) > 2:
                years.append(SCHOOLS_MAP[school].active_semesters.items()[-2])

            # Group all semesters into single parsing call for schools that
Example #28
from __future__ import absolute_import, unicode_literals

from celery import task
from celery.decorators import periodic_task
from celery.task.schedules import crontab


@periodic_task(
    # run_every=(crontab(minute='*/1')),
    run_every=(crontab(hour=23, minute=50)),
    name="activity_statistic_accesses_by_day",
    ignore_result=True)
def activity_statistic_accesses_by_day():
    """
        Scans the access table to collect the unique
        accesses and stores them in a separate table.
        This task only collects the information.
        Runs every day at 23:50.
    """
    from activity_statistic.reports import ActivityReports
    ActivityReports().unique_visits_today()


@periodic_task(
    # run_every=(crontab(minute='*/1')),
    run_every=(crontab(hour=8, minute=0)),
    name="activity_statistic_email_unique_hits_per_day",
    ignore_result=True)
def activity_statistic_email_unique_hits_per_day():
    """
        Sends the email of unique accesses for the previous day
Example #29
File: tasks.py  Project: relique/crm
from celery.task import periodic_task
from celery.task.schedules import crontab
from lib.parser import malaysia
from apps.customers.models import SuggestedCompany


@periodic_task(run_every=crontab(minute=0, hour=0))
def fetch_companies():
    list_ = malaysia.fetch()
    for data in list_:
        SuggestedCompany.objects.get_or_create(**data)
Example #30
from celery.task.schedules import crontab
from celery.decorators import task, periodic_task
from celery.utils.log import get_task_logger

from .utils import update_algorithm_data

logger = get_task_logger(__name__)


@periodic_task(run_every=crontab(minute='*/30'),
               name="update_algorithm_data",
               ignore_result=True)
def update_algorithm_data_task():
    update_algorithm_data()
    logger.info("Updated")


@task(name="finish_visit_exploration_task")
def finish_visit_exploration_task(visit):
    logger.info("Visit expired")
    logger.info(visit)

    visit.expired = True
    visit.save()
    logger.info(visit.expired)
    return
Example #31
from celery.task import periodic_task
from celery.task.schedules import crontab

PYPI_URL = 'https://pypi.python.org/pypi/sentry/json'
SENTRY_CHECKUPDATE_TIME = {
    'hour': 0,
    'minute': 0
}

logger = logging.getLogger(__name__)


@periodic_task(
    name='sentry.tasks.check_update',
    run_every=crontab(**SENTRY_CHECKUPDATE_TIME), queue='update')
def check_update():
    """
    Daily retrieving latest available Sentry version from PyPI
    """
    from sentry.models import set_sentry_version

    result = fetch_url_content(PYPI_URL)

    if result == BAD_SOURCE:
        return

    try:
        (_, _, body) = result

        version = json.loads(body)['info']['version']
Example #32
app.config_from_object('django.conf:settings', namespace='CELERY')

# autodiscover tasks in any app
app.autodiscover_tasks(settings.INSTALLED_APPS)


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))

# set schedule
# from parker.apps.interactions.schedule import SCHEDULE
# app.conf.CELERYBEAT_SCHEDULE = SCHEDULE

# @periodic_task(run_every=crontab(hour="*", minute="*", day_of_week="*"))
# def notificar_resumen_noche():
#     # from cutreronte_v2.telegram import bot
#     print("hola")

app.conf.beat_schedule = {
    'sniffer-mantenimiento': {
        'task': 'sniffer.tasks.mantenimiento',
        'schedule': crontab(hour="4", minute="0", day_of_week="*"),
        # 'args': ("uno dos", '46167421'),
    },
    'expulsar-gente-noche': {
        'task': 'cutreronte.tasks.expulsar_todos',
        'schedule': crontab(hour="1", minute="0", day_of_week="*"),
    },
}
Example #33
from celery import shared_task
from .models import Event
from datetime import datetime, timedelta
from django.core.mail import send_mail
from celery.decorators import periodic_task
from celery.task.schedules import crontab


@periodic_task(run_every=(crontab(minute='*')), name="event_send_mail")
def event_send_mail():
    events = Event.objects.filter(
        event_date__range=(datetime.now() + timedelta(minutes=59),
                           datetime.now() + timedelta(minutes=61)))
    print(events)
    for event in events:
        try:
            send_mail("Напоминание о событии",
                      str(event.title) + "начинаеся через час",
                      "*****@*****.**", [
                          "*****@*****.**",
                      ])
        except Exception as e:
            print(e)
Example #34
import logging
from django.core.cache import cache, get_cache
from celery.decorators import periodic_task
from celery.task.schedules import crontab
from antenna.rb.models import Group
from antenna.api.util_functions import *

logger = logging.getLogger('rb.standard')

@periodic_task(name='content_rec.refresh', ignore_result=True,
               run_every=(crontab(minute=0))) # Hourly
def refresh_recommended_content():
    groups = Group.objects.filter(id=3714) # Bustle only for now
    for group in groups:
        try:
            group_id = group.id
            logger.info('UPDATE RECOMMENDED CONTENT CACHE: ' + str(group_id))
            if cache.get('LOCKED_recommended_content_' + str(group_id)) is None:
                cache_data = getRecommendedContent(group_id)
                logger.info(cache_data)
                try:
                    cache.set('LOCKED_popular_content_' + str(group_id),'locked',15)
                    cache.set('recommended_content_' + str(group_id), cache_data )
                    cache.delete('LOCKED_recommended_content_' + str(group_id))
                except Exception, ex:
                    logger.info(ex)
                try:
                    get_cache('redundant').set('LOCKED_recommended_content_' + str(group_id),'locked',15)
                    get_cache('redundant').set('recommended_content_' + str(group_id), cache_data )
                    get_cache('redundant').delete('LOCKED_recommended_content_' + str(group_id))
                except Exception, ex:
Example #35
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from celery.utils.log import get_task_logger
from datetime import datetime
from .views import (
    getLatestEarthQuake,
    getLatestShakemap,
)
from dashboard.views import classmarkerGet

logger = get_task_logger(__name__)


@periodic_task(run_every=(crontab(hour='*')))
def updateLatestEarthQuake():
    getLatestEarthQuake()


@periodic_task(run_every=(crontab(hour='*')))
def updateLatestShakemap():
    getLatestShakemap(True)
Example #36
File: tasks.py  Project: jlopker/modernomad
from modernomad.backup import BackupManager
from django.contrib.sites.models import Site
import datetime
from django.utils import timezone
import requests
import json
from django.core import urlresolvers

import logging
logger = logging.getLogger(__name__)

#@periodic_task(run_every=crontab(hour=22, minute=53, day_of_week="*"))  
#def test():      
#    print "HELLO WORLD"                    

@periodic_task(run_every=crontab(hour=5, minute=30))
def send_guests_residents_daily_update():
	locations = Location.objects.all()
	for location in locations:
		guests_residents_daily_update(location)

@periodic_task(run_every=crontab(hour=4, minute=30))
def send_admin_daily_update():
	locations = Location.objects.all()
	for location in locations:
		admin_daily_update(location)

#@periodic_task(run_every=crontab(minute="*")) # <-- for testing
@periodic_task(run_every=crontab(hour=5, minute=0))
def send_guest_welcome():
	# get all reservations WELCOME_EMAIL_DAYS_AHEAD from now. 
Example #37
File: tasks.py  Project: Sateanu/django-sis
from datetime import date
import logging
from django.conf import settings

from celery.task.schedules import crontab
from celery.decorators import periodic_task
from celery import task

import sys

if 'ecwsp.work_study' in settings.INSTALLED_APPS:
    
    if settings.SYNC_SUGAR:
        from ecwsp.work_study.sugar_sync import SugarSync
        modify_date_minutes = int(Configuration.get_or_default("sync sugarcrm minutes",default="30").value)
        @periodic_task(run_every=crontab(minute='*/%s' % (modify_date_minutes,)))
        def update_contacts_from_sugarcrm():
            sugar_sync = SugarSync()
            sugar_sync.update_contacts_from_sugarcrm()
        
        @task()
        def update_contact_to_sugarcrm(contact):
            sugar_sync = SugarSync()
            sugar_sync.update_contact(contact)
    
    @periodic_task(run_every=crontab(hour=20, minute=27))
    def email_cra_nightly():
        """ Email CRA nightly time sheet and student interaction information
        """
        from_email = Configuration.objects.get_or_create(name="From Email Address")[0].value
        cras = CraContact.objects.filter(email=True)
Example #38
File: tasks.py  Project: relique/izba
# -*- coding: utf-8 -*-

import datetime

from django.conf import settings
from django.template import loader
from django.template import context
from celery.decorators import periodic_task
from celery.task.schedules import crontab
from apps.reports import utils


@periodic_task(run_every=crontab(day_of_month="1,15"))
def send_report():
    data = {}

    data["init_date"] = datetime.datetime.now().strftime("%Y-%m-01")
    data["date"] = datetime.datetime.now().strftime("%Y-%m-%d")
    range_ = [data["init_date"], data["date"]]

    data["parsed_ads"] = utils.get_parsed_ads_count()
    data["lim_parsed_ads"] = utils.get_parsed_ads_count(range_)
    data["user_ads"] = utils.get_user_ads_count()
    data["lim_user_ads"] = utils.get_user_ads_count(range_)
    data["users_count"] = utils.get_users_count()
    data["lim_users_count"] = utils.get_users_count(range_)
    data["total_ads"] = data["parsed_ads"] + data["user_ads"]
    data["total_lim_ads"] = data["lim_parsed_ads"] + data["lim_user_ads"]

    tmpl = loader.get_template("reports/pdf.html")
    html = tmpl.render(context.Context(data))
Example #39
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from main.memcache import memcache
from settings import CELERY_CACHE_TIMEOUT


CHECK_HOST_KEY = "check_passive_host_%s"
CHECK_NOTIFICATION_KEY = "check_notification_%s"

__author__ = "apredoi,adrianomarques"

from status_cron.views import check_passive_url_task, check_passive_port_task, send_notification_task
from main.models import Module, Notification


@periodic_task(run_every=crontab(hour="*", minute="*/1", day_of_week="*"))
def inform_self_status():
    """This adds the ability to celery to send its own status to site-status
    as an active agent.
    """
    if settings.INFORM_SELF_STATUS:
        api_url = "%(STATUS_URL)s/api/report_status?module_id=%(STATUS_MODULE_ID)s&module_api=%(STATUS_API_KEY)s&module_secret=%(STATUS_API_SECRET)s&module_status=%(AGENT_STATUS)s"
        api_url = api_url % dict(
            STATUS_URL=settings.INFORM_SELF_STATUS_URL,
            STATUS_MODULE_ID=settings.INFORM_SELF_STATUS_MODULE_ID,
            STATUS_API_KEY=settings.INFORM_SELF_STATUS_API_KEY,
            STATUS_API_SECRET=settings.INFORM_SELF_STATUS_API_SECRET,
            AGENT_STATUS="on-line",
        )
        logging.info("Making status call with url %s" % api_url)
        agent_update = urllib2.urlopen(api_url)
Example #40
weekday_number_to_name = {
	0: "Monday",
	1: "Tuesday",
	2: "Wednesday",
	3: "Thursday",
	4: "Friday",
	5: "Saturday",
	6: "Sunday"
}


#@periodic_task(run_every=crontab(hour=22, minute=53, day_of_week="*"))  
#def test():      
#    print "HELLO WORLD"                    

@periodic_task(run_every=crontab(hour=4, minute=30))
#@periodic_task(run_every=crontab(minute="*")) # <-- for testing
def admin_today_notification():
	today = datetime.datetime.today() 
	arriving_today = Reservation.objects.filter(arrive=today).filter(status='confirmed')
	departing_today = Reservation.objects.filter(depart=today).filter(status='confirmed')
	domain = Site.objects.get_current().domain
	plaintext = get_template('emails/admin_today_notification.txt')
	c = Context({
		'arriving' : arriving_today,
		'departing' : departing_today,
		'domain': domain,
	})
	text_content = plaintext.render(c)
	subject = "[Embassy SF] Guest Arrivals and Departures for %s" % (str(today))
	sender = settings.DEFAULT_FROM_EMAIL
Example #41
            if not is_skip:
                logger.info('Going to rm file: %s, filesize=%s', file, localfilesz)
                rmfile(None, file)
        except Exception as e:
            logger.exception('check rm file: %s exception', f)


    if download_count > 0:
        ThreadPool.initialize()
        for (f, filesize) in downloadfiles:
            url = join(hostname, f)
            ThreadPool.add_task_with_param(download, url)
        ThreadPool.wait_for_complete(timeout=3600*10)

        logger.warn("complete downloads")
    
        ThreadPool.clear_task()
        ThreadPool.stop()

    return 'OK'


@periodic_task(run_every=crontab(hour='*', minute='*/1'))
def every_monday_morning():
    logger.info("This is run every Monday morning at 7:30")
    
if __name__ == "__main__":
    download("http://www.blog.pythonlibrary.org/wp-content/uploads/2012/06/wxDbViewer.zip")    

Example #42
from celery.decorators import periodic_task
from django.core import management
from celery.task.schedules import crontab
from celery.utils.log import get_task_logger

logger = get_task_logger(__name__)




@periodic_task(run_every=(crontab(minute='*/5')), name='update_hash_scores', ignore_result=True)
def update_hash():
    logger.info("update_hash")
    management.call_command('update_hash')
Example #43
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from . import scrapers
from celery.utils.log import get_task_logger
from datetime import datetime

logger = get_task_logger(__name__)


@periodic_task(run_every=(crontab(hour="*", minute="*", day_of_week="*")))
def loan():
    logger.info("Start task")
    now = datetime.now()
    result = scrapers.scraper_example(now.day, now.minute)
    logger.info("Task finished: result = %i" % result)
Example #44
from celery.task.schedules import crontab
from celery.decorators import task, periodic_task
from celery.utils.log import get_task_logger

from .utils import update_algorithm_data

logger = get_task_logger(__name__)


@periodic_task(
    run_every=crontab(minute='*/30'),
    name="update_algorithm_data",
    ignore_result=True
)
def update_algorithm_data_task():
    update_algorithm_data()
    logger.info("Updated")


@task(name="finish_visit_exploration_task")
def finish_visit_exploration_task(visit):
    logger.info("Visit expired")
    logger.info(visit)

    visit.expired = True
    visit.save()
    logger.info(visit.expired)
    return
    
Example #45
File: tasks.py  Project: relique/izba
import datetime

from django.contrib.humanize.templatetags import humanize
from celery.decorators import periodic_task
from celery.task.schedules import crontab
from common.utils import currency
from apps.social import models as social
from apps.social.utils import post_to_fb
from apps.motors.models import Automobile
from apps.realestate.models import Apartment
from apps.electronics.models import Telephone

BASEURL = 'http://www.izba.kg'

@periodic_task (run_every=crontab(hour=10, minute=0, 
                                  day_of_week='Mon,Wed,Fri,Sun'))
def facebook_autos():
    try:
        item = Automobile.objects.filter(
            published=True, 
            year__range=[1998, datetime.date.today().year],
            pub_date__range=[datetime.datetime.now()-datetime.timedelta(hours=10), 
                             datetime.datetime.now()]
        ).exclude(image_1='').order_by('-pub_date')[0]
    except IndexError as e:
        return

    try:
        social.FacebookPost.objects.get(item=item)
    except social.FacebookPost.DoesNotExist:
        pass
Example #46
File: tasks.py  Project: linea-it/dri
from __future__ import absolute_import, unicode_literals

from celery import task
from celery.decorators import periodic_task
from celery.task.schedules import crontab


@periodic_task(
    # run_every=(crontab(minute='*/1')),
    run_every=(crontab(hour=23, minute=50)),
    name="activity_statistic_accesses_by_day",
    ignore_result=True
)
def activity_statistic_accesses_by_day():
    """
        Scans the access table to collect the unique
        accesses and stores them in a separate table.
        This task only collects the information.
        Runs every day at 23:50.
    """
    from activity_statistic.reports import ActivityReports
    ActivityReports().unique_visits_today()


@periodic_task(
    # run_every=(crontab(minute='*/1')),
    run_every=(crontab(hour=8, minute=0)),
    name="activity_statistic_email_unique_hits_per_day",
    ignore_result=True
)
def activity_statistic_email_unique_hits_per_day():
Example #47
File: tasks.py  Project: csrs/modernomad
weekday_number_to_name = {
	0: "Monday",
	1: "Tuesday",
	2: "Wednesday",
	3: "Thursday",
	4: "Friday",
	5: "Saturday",
	6: "Sunday"
}


#@periodic_task(run_every=crontab(hour=22, minute=53, day_of_week="*"))  
#def test():      
#    print "HELLO WORLD"                    

@periodic_task(run_every=crontab(hour=4, minute=30))
@periodic_task(run_every=crontab(minute="*")) # <-- for testing
def admin_today_notification():
	today = datetime.datetime.today() 
	arriving_today = Reservation.objects.filter(arrive=today).filter(status='confirmed')
	departing_today = Reservation.objects.filter(depart=today).filter(status='confirmed')
	domain = Site.objects.get_current().domain
	plaintext = get_template('emails/admin_today_notification.txt')
	c = Context({
		'arriving' : arriving_today,
		'departing' : departing_today,
		'domain': domain,
	})
	text_content = plaintext.render(c)
	subject = "[" + settings.EMAIL_SUBJECT_PREFIX + "] Guest Arrivals and Departures for %s" % (str(today))
	sender = settings.DEFAULT_FROM_EMAIL
Example #48
from __future__ import absolute_import, division, print_function

from celery.decorators import periodic_task, task
from celery.task.schedules import crontab
from celery.utils.log import get_task_logger
from django.core import management
from django.conf import settings

from timetable.school_mappers import SCHOOLS_MAP
from parsing.schools.active import ACTIVE_SCHOOLS

logger = get_task_logger(__name__)


@periodic_task(
    run_every=(crontab(hour=00, minute=00)),
    name="task_parse_current_registration_period",
    ignore_result=True
)
def task_parse_current_registration_period(schools=None, textbooks=False):
    """Parse semesters in current registration period."""
    schools = set(schools or ACTIVE_SCHOOLS)
    for school in set(SCHOOLS_MAP) & schools:
        # Grab the most recent year.
        years = [SCHOOLS_MAP[school].active_semesters.items()[-1]]

        # Handle case where registration is for full academic year
        if SCHOOLS_MAP[school].full_academic_year_registration:
            if len(SCHOOLS_MAP[school].active_semesters) > 2:
                years.append(SCHOOLS_MAP[school].active_semesters.items()[-2])
Example #49
            try:
                have_lock = lock.acquire(blocking=False)
                if have_lock:
                    ret_value = run_func(*args, **kwargs)
            finally:
                if have_lock:
                    lock.release()

            return ret_value

        return _caller

    return _dec(function) if function is not None else _dec


@periodic_task(run_every=crontab(minute="*/30"))
def run_ts3_group_update():
    if settings.ENABLE_AUTH_TEAMSPEAK3 or settings.ENABLE_BLUE_TEAMSPEAK3:
        logger.debug("TS3 installed. Syncing local group objects.")
        Teamspeak3Manager._sync_ts_group_db()


def disable_teamspeak():
    if settings.ENABLE_AUTH_TEAMSPEAK3:
        logger.warn(
            "ENABLE_AUTH_TEAMSPEAK3 still True, after disabling users will still be able to create teamspeak accounts")
    if settings.ENABLE_BLUE_TEAMSPEAK3:
        logger.warn(
            "ENABLE_BLUE_TEAMSPEAK3 still True, after disabling blues will still be able to create teamspeak accounts")
    for auth in AuthServicesInfo.objects.all():
        if auth.teamspeak3_uid:
Example #50
File: tasks.py  Project: Lefford/test--case
from celery.decorators import periodic_task
from celery.task.schedules import crontab
from hotels.management.commands.importcity import save_city
from hotels.management.commands.importhotel import save_hotel
from utils import get_remote_content

URL = 'http://python-demo.maykin.nl/'
USERNAME = '******'
PASSWORD = '******'

@periodic_task(run_every=crontab())
def import_city():
    """Runs the city import every minute."""
    city_uri = 'city.csv'

    data = get_remote_content(URL + city_uri, USERNAME, PASSWORD)
    save_city(data)


@periodic_task(run_every=crontab(minute='*/2'))
def import_hotel():
    """Runs the hotel import every two minutes."""
    hotel_uri = 'hotel.csv'

    data = get_remote_content(URL + hotel_uri, USERNAME, PASSWORD)
    save_hotel(data)
Example #51
from datetime import timedelta

from celery.decorators import periodic_task, task
from celery.task.schedules import crontab

from wuvt import app
from wuvt import db
from wuvt import redis_conn
from wuvt.celeryconfig import make_celery
from wuvt.trackman.lib import get_duplicates, logout_all, enable_automation
from wuvt.trackman.models import AirLog, DJSet, Track, TrackLog

celery = make_celery(app)


@periodic_task(run_every=crontab(hour=3, minute=0))
def deduplicate_tracks():
    dups = get_duplicates(Track, ["artist", "title", "album", "label"])
    for artist, title, album, label in dups:
        track_query = Track.query.filter(
            db.and_(Track.artist == artist, Track.title == title, Track.album == album, Track.label == label)
        ).order_by(Track.id)
        count = track_query.count()
        tracks = track_query.all()
        track_id = int(tracks[0].id)

        # update TrackLogs
        TrackLog.query.filter(TrackLog.track_id.in_([track.id for track in tracks[1:]])).update(
            {TrackLog.track_id: track_id}, synchronize_session=False
        )
Example #52
from celery.decorators import periodic_task

logger = logging.getLogger(__name__)


@task
def send_email(template_name, subject, from_email, recipients, context, bcc=None, attachments=None):
    logger.info('Sending {} email to {}'.format(template_name, from_email))
    return send_html_email(template_name, subject, from_email, recipients, context, bcc, attachments)

@task
def send_text(template_name, recipient, context):
    logger.info('Sending {} text to {}'.format(template_name, recipient))
    return send_text_utility(template_name, recipient, context)

@periodic_task(run_every=crontab(hour="*/2", minute=0))
def send_reminder_chooseslot_emails():
    if not audition_signup_open():
        return

    one_hour_ago = timezone.now() - datetime.timedelta(hours=1)
    auditioners = Auditioner.objects.filter(
        time_registered__lte=one_hour_ago,
        auditionslot=None,
        sent_slot_reminder_email=False
    )
    for auditioner in auditioners:
        send_email.delay(
            template_name='auditionslot_signup_reminder',
            subject='Reminder: Choose an audition slot',
            from_email=settings.HARVARD_TALENT_EMAIL,
Example #53
File: tasks.py  Project: linea-it/dri
from __future__ import absolute_import, unicode_literals

from celery import task
from celery.decorators import periodic_task
from celery.task.schedules import crontab

from django.conf import settings
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Garbage Colector %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #
@periodic_task(
    run_every=(crontab(minute='*/30')),
    #run_every=10.0,
    name="garbage_colector",
    ignore_result=True
)
def garbage_colector():
    """
    Runs cleanup routines
    """
    # Clean up the products
    from product.garbagecolector import GarbageColectorProduct
    GarbageColectorProduct().purge_products_expiration_time()
Example #54
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from celery.decorators import task
from advocoders import utils
from advocoders.models import Profile


@periodic_task(run_every=crontab(hour='*/4', minute='0', day_of_week='*'))
def update_feeds():
    utils.update_feeds()


@task
def update_feed(profile_id, provider):
    profile = Profile.objects.get(pk=profile_id)
    if provider == 'blog':
        utils.update_feed(profile, 'blog', profile.blog)
        return
    for social_auth in profile.user.social_auth.all():
        if social_auth.provider == provider:
            utils.update_feed(profile, provider,
                social_auth.extra_data.get('rss_url'))
Example #55
import logging
from .models import MomoRequest
from payments.celery import app
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from celery.utils.log import get_task_logger
from .mtn import MtnMomo

logger = get_task_logger(__name__)


@periodic_task(run_every=(crontab(minute='*/1')), name="payment_status_task", ignore_result=True)
def get_payment_status_task():

	momorequests = MomoRequest.objects.exclude(status="SUCCESSFUL")

	for request in momorequests:

	    transdetail = MtnMomo.collectionTransactionStatus(request.transref)
	    request.status = transdetail['status']
	    request.save()
	    logger.info(transdetail)



Example #56
        return False


def set_state(user):
    state = determine_membership_by_user(user)
    logger.debug("Assigning user %s to state %s" % (user, state))
    if state == "MEMBER":
        make_member(user)
    elif state == "BLUE":
        make_blue(user)
    else:
        disable_member(user)


# Run every minute
@periodic_task(run_every=crontab(minute="*/1"))
def run_databaseUpdate():
    logger.debug("Starting database update.")
    users = User.objects.all()
    if is_teamspeak3_active():
        logger.debug("TS3 installed. Syncing local group objects.")
        Teamspeak3Manager._sync_ts_group_db()
    for user in users:
        logger.debug("Initiating database update for user %s" % user)
        groups = user.groups.all()
        logger.debug("User has groups %s" % groups)
        syncgroups = SyncGroupCache.objects.filter(user=user)
        logger.debug("User has syncgroups %s" % syncgroups)
        add_to_databases(user, groups, syncgroups)
        remove_from_databases(user, groups, syncgroups)
Example #57
    return Operation.objects.filter(
        status__in=status_filters,
        modified__lt=datetime.now() - timedelta(hours=1)
    ).order_by("-modified")


def slow_operations_other_than_submitted():
    return Operation.objects.filter(
        status__in=["enqueued", "in progress"],
        modified__lt=datetime.now() - timedelta(hours=1)
    )


@periodic_task(
    run_every=crontab(
        hour="7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23",
        minute="3", day_of_week="*"))
def check_for_slow_operations():
    operations = slow_operations()
    if operations.count() > 0:
        other_than_submitted = slow_operations_other_than_submitted()
        if other_than_submitted.count() > 0:
            # there are operations that are enqueued or in progress
            # so sysadmins need to know too
            send_slow_operations_email(operations)
        else:
            # it's just 'submitted' operations that are slow
            # so it's just the video team's problem
            send_slow_operations_to_videoteam_email(operations)

    # else, no slow operations to warn about. excellent.
Example #58
from celery.task.schedules import crontab
import requests
import datetime
from celery.decorators import periodic_task
from celery.utils.log import get_task_logger
from .models import RecurrentSheduleLine
from apps.utils.models import Currency, CurrencyRateLine

logger = get_task_logger(__name__)


@periodic_task(run_every=(crontab(minute=0, hour='*/3', day_of_week="*")))
def register_recurrent_transaction():

    logger.info("Starting post shedule transactions)")

    recurrents = RecurrentSheduleLine.objects.all_recurrents_pending()

    for recurrent in recurrents:
        recurrent.post_transaction()
        recurrent.log_post_date = datetime.date.today()
        recurrent.save()
        recurrent.shedule.set_next_shedule_line()
        logger.info("recurrent #" + recurrent.pk)

    return "Registering recurrent transaction done..."


@periodic_task(run_every=(crontab(minute=0, hour='*/8')))
def update_currency_rate():
    logger.info("Starting updating currency rate")
Example #59
    local('{command} | gzip > {fname}'.format(command=command,
                                              fname=day_fname))

    if not is_deploy:
        local('cp {day_fname} {week_fname}'.format(day_fname=day_fname,
                                                   week_fname=week_fname))
    else:
        files = local('ls -r {names}*'.format(names=day_fname.split('-')[0]),
                      capture=True)
        for file in files.split('\n')[3:]:
            local('rm {file}'.format(file=file))

    return [day_fname, week_fname, is_deploy]


@periodic_task(run_every=crontab(minute=0, hour=1))
def backup_postgres(is_deploy=False):

    if is_deploy and is_database_synchronized(DEFAULT_DB_ALIAS):
        print('is_database_synchronized')
        return 'is_database_synchronized'

    folder = '{}/backups'.format(settings.BASE_DIR)
    local('mkdir -p {folder}/postgres'.format(folder=folder))

    fname = '{folder}/postgres/%name%.backup.gzip'.format(folder=folder)
    command = 'export PGPASSWORD={password} && pg_dumpall --host=postgres --username={user}'.format(
        password=os.environ.get('POSTGRES_PASSWORD'),
        user=os.environ.get('POSTGRES_USER'))

    return generate_backup(fname, command, is_deploy)
from __future__ import absolute_import, unicode_literals

import logging
logger = logging.getLogger(__name__)

import random
from datetime import datetime as dt
from datetime import timedelta
from decimal import *

from django.utils.timezone import make_aware
from django.core.management import call_command
from django.conf import settings
from celery.task.schedules import crontab
from celery.decorators import periodic_task

from .models import User, Account, Transaction


@periodic_task(run_every=crontab(hour=0))
def generate_transaction():
    pass
    #call_command('mocktransactions', '--numdays=1')
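
One caveat about this last snippet: crontab(hour=0) leaves minute at its default of '*', so generate_transaction is scheduled for every minute between 00:00 and 00:59 rather than once per day. A minimal sketch of the once-a-day variant, under the same imports:

@periodic_task(run_every=crontab(hour=0, minute=0))
def generate_transaction():
    # Runs once per day, at midnight.
    pass
    #call_command('mocktransactions', '--numdays=1')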