def schedule(gmclient, opts):
    sched = Scheduler()
    sched.daemonic = False

    @sched.cron_schedule(hour="2-23", minute=0)
    def movie_task():
        task = MovieTask(opts)
        gmclient.submit_job(task)

    @sched.cron_schedule(hour="2-23", minute=10)
    def series_task():
        task = SeriesTask(opts)
        gmclient.submit_job(task)

    @sched.cron_schedule(hour="2-23", minute=30)
    def useries_task():
        task = UpdatingSeriesTask(opts)
        gmclient.submit_job(task)

    @sched.cron_schedule(hour="2-23", minute=0)
    def error_episode_task():
        task = DownloadErrorEpisodeTask(opts)
        gmclient.submit_job(task)

    sched.start()
def load(self):
    sched = Scheduler()
    sched.daemonic = False
    # Schedule job_function to run every 3 seconds
    sched.add_cron_job(self.job_function, second='*/3')
    sched.start()
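
For reference, the same cron API also expresses calendar-style schedules; the example from the APScheduler docs runs a job on the third Friday of June, July, August, November and December at 00:00, 01:00, 02:00 and 03:00:

    sched.add_cron_job(job_function, month='6-8,11-12', day='3rd fri', hour='0-3')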
Example #3
def main():
    global previous_disk_io, previous_net_io
    elastic_ip = config.get('logging', {}).get('logserver',
                                               {}).get('node', None)
    elastic_port = config.get('logging', {}).get('logserver',
                                                 {}).get('elastic',
                                                         {}).get('port', 9200)

    if not elastic_ip or not elastic_port:
        log.error(
            "Elasticsearch cluster not configured in the seed. There is no need to gather stats on this box."
        )
        sys.exit(1)

    scheduler = Scheduler()
    cur_host = get_hostname()
    cur_ip = get_hostip()
    es = elasticsearch.Elasticsearch([{
        'host': elastic_ip,
        'port': elastic_port
    }])

    scheduler.add_interval_job(calculate_system_metrics,
                               seconds=60,
                               kwargs={
                                   "es": es,
                                   "cur_ip": cur_ip,
                                   "cur_host": cur_host
                               })

    if is_riak(cur_ip, cur_host):
        scheduler.add_interval_job(calculate_solr_metrics,
                                   seconds=60,
                                   kwargs={
                                       "es": es,
                                       "cur_ip": cur_ip,
                                       "cur_host": cur_host
                                   })

        scheduler.add_interval_job(calculate_riak_metrics,
                                   seconds=60,
                                   kwargs={
                                       "es": es,
                                       "cur_ip": cur_ip,
                                       "cur_host": cur_host
                                   })

    scheduler.daemonic = False
    scheduler.start()
Example #5
def main():
    sched = Scheduler()
    # mysql_engine = create_engine('mysql://root:@localhost:3306/fengine?charset=utf8',encoding = "utf-8",echo =True)
    mysql_engine = get_db_engine()
    sched.daemonic = False
    print "Starting index engine......"
    job_store = SQLAlchemyJobStore(engine=mysql_engine)
    sched.add_jobstore(job_store, 'default')

    list_spider_job(sched)  # Queue the spider tasks
    scan_engine_job(sched)  # Queue the main indexing service

    # sched.add_cron_job(scan_loan_items_job, hour='*', minute='*', second='5')
    # Add the indexing job to the scheduler, fired every 5 minutes
    # engine_name = 'engine.py'
    # python_loc = os.path.join(os.getcwd(), engine_name)
    # sched.add_interval_job(python_job_func, seconds=5, name=engine_name, args=[python_loc])
    # list_spider_job(sched)
    sched.start()
Example #6
def __init__(self):
    sched = Scheduler()
    sched.daemonic = False
    script = scriptload()
    monthly = monthly_work()
    daily = daily_work()
    sched.add_cron_job(script.rise, month='1,4,7,10', day='10', hour='1', minute='1')
    # sched.add_cron_job(script.kpi, month='1-12', day='1', hour='1', minute='1')  # Build the current month's assessment table on the 1st of each month
    sched.add_cron_job(script.total, month='1-12', day='1', hour='2', minute='1')  # Compute last month's balance totals at 2 a.m. on the 1st of each month
    sched.add_cron_job(script.perform, month='1-12', day='1', hour='3', minute='1')  # Initialize the current month's performance table at 3 a.m. on the 1st of each month
    # sched.add_cron_job(script.first, month='1-12', day='1', hour='5', minute='1')  # Compute the historical loan-count split
    sched.add_cron_job(monthly.run, month='1-12', day='1', hour='4', minute='1')  # Compute last month's profit contribution
    sched.add_cron_job(daily.run, month='1-12', day='1', hour='1', minute='1')  # Active loan counts, overdue updates, unsettled loans, repayment updates
    sched.add_cron_job(script.linePayment, month='1-12', day='1', hour='2', minute='1')  # Update Chengyidai and compute performance at the start of the month
    sched.add_cron_job(script.yidaitong, month='1-12', day='1', hour='5', minute='1')  # Update Yidaitong simulated profit and compute performance at the start of the month
    # sched.add_cron_job(script.yidaitong, max_runs=1)
    sched.add_cron_job(script.paymentMonth, month='1-12', day='2', hour='5', minute='1')  # Compute last month's payroll on the 2nd of each month

    sched.start()
import datetime
import time
import os
from apscheduler.scheduler import Scheduler

# Start the scheduler
sched = Scheduler()
sched.daemonic = False
sched.start()

def job_function():
    os.system(" spark-submit --class ClimateClustering --master local[4] CSYE7374_Final_Spark-assembly-1.0.jar ec2-52-20-252-81.compute-1.amazonaws.com")
    os.system(" spark-submit --class ClimateARIMA --master local[4] CSYE7374_Final_Spark-assembly-1.0.jar ec2-52-20-252-81.compute-1.amazonaws.com")
    print(datetime.datetime.now())
    time.sleep(20)

# Schedule job_function to run once a day at midnight
sched.add_cron_job(job_function, hour='0', minute='0', second='0')

import datetime
import logging
import time

from apscheduler.scheduler import Scheduler

gconfig = {'apscheduler.threadpool.core_threads':2,
           'apscheduler.threadpool.max_threads':50,
           'apscheduler.threadpool.keepalive':4,
           'apscheduler.misfire_grace_time':1,
           'apscheduler.coalesce':True}
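
# What these options do (APScheduler 2.x): the threadpool.* keys size the
# worker thread pool; misfire_grace_time is how many seconds late a job may
# fire before it counts as missed; coalesce folds a backlog of pending runs
# into a single run.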

logging.basicConfig()
sched = Scheduler()
sched.daemonic = True
sched.configure(gconfig)
sched.start()

def do_job(a):
    print "This is " + str(a)
    print repr(sched._threadpool), str(sched._threadpool._queue.qsize())
    print time.strftime('%Y-%m-%d %H:%M:%S')

current_date = datetime.datetime.now()
for i in range(0,10):
    start_date = current_date+datetime.timedelta(seconds=i)
    sched.add_interval_job(do_job, seconds=10, args=('number ' + str(i),), start_date=start_date)
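# Each job shares the same 10-second interval, but each start_date is offset
# by one second, so the ten jobs fire staggered rather than all at once.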

# Keep the main thread alive; the scheduler is daemonic, so the process would
# otherwise exit immediately
while True:
    # print repr(pool)
    time.sleep(1)
Example #9
#!/usr/bin/python2.6
#coding=utf-8
#author [email protected] 
#edit 2014-09-01 10:56:27

import apscheduler
from apscheduler.scheduler import Scheduler
from logger import Log

logger = Log().getLog()

sched = Scheduler()
sched.daemonic = False  # non-daemonic worker threads


def job_events_listener(jobEvent):
    '''Listener for job events.
    '''
    if jobEvent.code == apscheduler.events.EVENT_JOB_EXECUTED:
        # The job ran normally
        logger.info("scheduled|%s|trigger=%s|scheduled_time=%s" % (jobEvent.job.name, jobEvent.job.trigger, jobEvent.scheduled_run_time))

    else:
        # The job raised an exception or its run was missed
        logger.exception((jobEvent.code, jobEvent.exception, jobEvent.job, jobEvent.scheduled_run_time))
        except_msg = "missed execution" if not jobEvent.exception else str(jobEvent.exception.args)
        alert_msg = "%s,%s,%s,%s" % (
            jobEvent.code, jobEvent.job.name, except_msg, jobEvent.scheduled_run_time.strftime("%H:%M:%S"))
        # Alerting
        #monitor.add_alert_msg(jobEvent.job.name, alert_msg)
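
# The listener above only takes effect once it is registered with the
# scheduler; a minimal hookup sketch (the mask constants come from
# apscheduler.events and are bitwise-OR'd together):
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR, EVENT_JOB_MISSED

sched.add_listener(job_events_listener,
                   EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED)
sched.start()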
Example #11
File: cron.py Project: de1o/MoePad
def updateNewItem():
    update_sched = Scheduler()
    update_sched.daemonic = False
    update_sched.add_cron_job(check_update, hour='0-23',
                              minute='8,28,48', second=0)
    update_sched.start()
Example #12
    print "saveing!"
    data = getJson()

    date = datetime.now().date()
    dateStr = str(date)
    hour = datetime.now().hour
    hourStr = str(hour)
    nameStr = dateStr+'-'+hourStr

    file(nameStr+'.json','w').write(json.dumps(data,indent=2))
    f = open(nameStr+'.json')
    jsoned = json.load(f)
    f.close()
    with open(nameStr+'.csv','wb+') as csv_file:
        csv_writer = csv.writer(csv_file)
        for item in jsoned:
            csv_writer.writerow([item[u'pm2_5'],item[u'primary_pollutant'],item[u'co'],item[u'pm10'],item[u'area'],item[u'o3_8h'],
                       item[u'o3'],item[u'o3_24h'],item[u'station_code'],item[u'quality'],item[u'co_24h'],item[u'no2_24h'],
                       item[u'so2'],item[u'so2_24h'],item[u'time_point'],item[u'pm2_5_24h'],item[u'position_name'],
                       item[u'o3_8h_24h'],item[u'aqi'],item[u'pm10_24h'],item[u'no2']])

if __name__ == '__main__':
    saveData()

    scheduler = Scheduler()
    scheduler.daemonic = False
    scheduler.add_interval_job(saveData, seconds=3600)

    print('One Hour')
    scheduler.start()
Example #13
def clean_cached_items():
    urllib2.urlopen(clean_cache_url)


def send_reauth_email():
    urllib2.urlopen(reauth_url)


def send_rtsina():
    urllib2.urlopen(rtsina_url)


# NB: the server timezone should be GMT+8
day_update_sched = Scheduler()
day_update_sched.daemonic = False
day_update_sched.add_cron_job(check_update,
                              hour='7-23',
                              minute='10,30,50',
                              second=0)
day_update_sched.start()

night_update_sched = Scheduler()
night_update_sched.daemonic = False
night_update_sched.add_cron_job(check_update, hour='0-6', minute=10, second=0)
night_update_sched.start()

# day_rtsina_sched = Scheduler()
# day_rtsina_sched.daemonic = False
# day_rtsina_sched.add_cron_job(send_rtsina, hour='7-23', minute='12,32,52', second=0)
# day_rtsina_sched.start()
Example #14
File: cron.py Project: de1o/MoePad
def night_send():
    night_send_sched = Scheduler()
    night_send_sched.daemonic = False
    night_send_sched.add_cron_job(send, hour='0-6', minute=10, second=0)
    night_send_sched.start()
Example #16
    def tasksubmit(self):
        try:
            for item in self.task_conf:
                # Fetch the monitored object and metric
                monitorObj_id = item['monitorObj_id']
                monitorMetrics_id = item['monitorMetrics_id']
                monitorMetrics_name = item['monitorMetrics_name']
                taskrunner = item['taskrunner']

                # status == 1 means the monitor is active; anything else is treated as retired
                if(item['status'] == 1):
                    ret_monitorMetrics_id,ret_monitorObj_id,ret_monitorMetrics_name = self.get_monitorMetrics(monitorMetrics_id)
                    try:
                        assert(monitorObj_id == ret_monitorObj_id)
                        assert(monitorMetrics_id == ret_monitorMetrics_id)
                        assert(monitorMetrics_name == ret_monitorMetrics_name)
                        info_str = str(monitorObj_id) + "," + str(monitorMetrics_id) + "," + monitorMetrics_name + " verify success"
                        logging.info(info_str)
                    except Exception as e:
                        # Validation failed: log it and notify the administrators by SMS and email
                        error_str = (u"Bad task entry in taskRunner.conf: " + str(monitorObj_id) + "," + str(monitorMetrics_id) + ","
                                     + monitorMetrics_name + u" failed validation; please check this task's configuration")
                        logging.error(error_str)
                        logging.error(str(e))
                        # SMS notification
                        msgSend(self.phone_list, self.subject + " : " + error_str, mode="qapi")
                        # Email notification
                        maillib = mailLib()
                        maillib.simple_text_mail_send(self.subject, error_str, self.from_mail_addr, self.to_mail_addr, self.mail_server)
                        continue

                    # Validation passed: submit the right kind of job based on start_type
                    #---------------------------------------------------------------------------------------------
                    # Task start types:
                    #   0: scheduled start; needs a start time and an interval
                    #   1: interval start; needs an interval period
                    #   2: immediate start; a one-off task
                    #---------------------------------------------------------------------------------------------
                    start_type = item['start_type']
                    # Type: scheduled submission
                    if(start_type == 0):
                        start_time = item['start_time']
                        period = item['period']
                        sched = Scheduler()
                        sched.daemonic = False
                        dd = datetime.datetime.now()
                        dd_delta = dd + datetime.timedelta(days=0)
                        cutime = dd_delta.strftime("%Y-%m-%d")
                        ddnew_str = cutime + " " + start_time
                        ddnew = datetime.datetime.fromtimestamp(time.mktime(time.strptime(ddnew_str, "%Y-%m-%d %H:%M:%S")))
                        sched.add_interval_job(self.add_job, minutes=period, start_date=ddnew, args=[monitorObj_id, monitorMetrics_id, monitorMetrics_name, taskrunner])
                        sched.start()
                        logging.info("add_interval_job success")

                    # Type: interval submission
                    elif(start_type == 1):
                        period = item['period']
                        sched = Scheduler()
                        sched.daemonic = False
                        sched.add_cron_job(self.add_job, minute='*/' + str(period), args=[monitorObj_id, monitorMetrics_id, monitorMetrics_name, taskrunner])
                        sched.start()
                        logging.info("add_cron_job success")
                    # Type: immediate
                    elif(start_type == 2):
                        self.add_job(monitorObj_id, monitorMetrics_id, monitorMetrics_name, taskrunner)
                        logging.info("add instant job success")
        except Exception as e:
            logging.error(str(e))
def main():
    parser = OptionParser()
    parser.add_option('--scrapyd', dest='scrapyd', default='scrapy-server:6800',
            help='scrapyd url')
    parser.add_option('--project', dest='project', default='feiying',
            help='scrapy project name')

    (options, args) = parser.parse_args()
    if options.scrapyd is None or options.project is None:
        parser.print_help()
        sys.exit()

    cmd = "curl http://%s/schedule.json -d project=%s -d spider=" % (options.scrapyd,
            options.project)

    sched = Scheduler()
    sched.daemonic = False

    @sched.cron_schedule(hour='0-23/2', minute=20)
    def crawl_youku_video():
        os.system(cmd + 'youku_video')

    @sched.cron_schedule(hour='0-23/2', minute=30)
    def crawl_tudou_video():
        os.system(cmd + 'tudou_video')

#    @sched.cron_schedule(hour=11, minute=25)
#    def crawl_letv_movie():
#        os.system(cmd + 'letv_movie')
             
#    @sched.cron_schedule(hour=21, minute=25)
#    def crawl_letv_series():
#        os.system(cmd + 'letv_series')
   
    # schedule the task of feiying mysql index backup in coreseek
    @sched.interval_schedule(hours=3)
    def backup_feiying_mysql_index():
        bak_path = '/backups'
        is_path_ready = True
        if not os.path.exists(bak_path):
            # mkdir for backup path
            try:
                os.makedirs(bak_path)
                print 'create ' + bak_path + ' successfully'
            except OSError:
                is_path_ready = False
                print 'create ' + bak_path + ' error'
        # do backup
        bak_file_name = "feiying_bak.tar.gz"
        tmp_bak_name = "tmp_" + bak_file_name
        coreseek_data_path = "/usr/local/coreseek/var/data" 
        if is_path_ready:
            backup_cmd = "tar -zcvf " + bak_path + "/" + tmp_bak_name + " " + coreseek_data_path  + "/feiying_mysql.* " + coreseek_data_path + "/binlog.*"
            result = os.system(backup_cmd)
            if 0 == result:
                # delete previous backup file, and rename tmp backup file to formal backup file
                os.rename(bak_path + "/" + tmp_bak_name, bak_path + "/" + bak_file_name)
    
    # Schedule the task of feiying mysql index
    @sched.interval_schedule(hours=2)
    def index_feiying_mysql():
        index_cmd = "/usr/local/coreseek/bin/indexer -c /usr/local/coreseek/etc/feiying_mysql.conf --all --rotate --quiet"
        os.system(index_cmd)
    
    sched.start()
Example #18
import signal
import daemon
import lockfile

from apscheduler.scheduler import Scheduler

def job_function():
  print "Hello World"


def main():
  signal.pause()

if __name__ == '__main__':
  sched = Scheduler()
  sched.daemonic = True
  sched.start()
  sched.add_interval_job(job_function, minutes=1)

  context = daemon.DaemonContext(
    working_directory='/data',
    umask=0o002,
    pidfile=lockfile.FileLock('/tmp/wikipedia_pagecount.pid'),
    )

  context.signal_map = {
    signal.SIGTERM: 'terminate',
    signal.SIGHUP: 'terminate',
    signal.SIGUSR1: 'terminate'
  }
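
  # The snippet ends here; presumably (not shown in the original) the daemon
  # context is entered so the process detaches and the scheduler keeps running:
  with context:
    main()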
Example #19
    Usage guide: http://pythonhosted.org/APScheduler/index.html
    API reference: http://packages.python.org/APScheduler/genindex.html
    Downloads: https://pypi.python.org/pypi/APScheduler/

    Install with easy_install:
        easy_install apscheduler


1. To run a task once at a specific time:

    from datetime import datetime
    from apscheduler.scheduler import Scheduler

    sched = Scheduler()
    sched.daemonic = False # non-daemonic worker threads; the Scheduler docs recommend non-daemonic threads
    # The two lines above can be shortened to: sched = Scheduler(daemonic=False)

    def job_function(text):
        print text

    # Run at the given time, exactly once
    job = sched.add_date_job(job_function, datetime(2013, 10, 30, 17, 13, 59), ['Hello World'])
    sched.start()


2. Sometimes a task needs to run at a fixed time interval (interval-based scheduling):

    from apscheduler.scheduler import Scheduler

    sched = Scheduler()
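    sched.daemonic = False

    # The original example is cut off here; a minimal completion (assuming a
    # simple print job) would run it every five seconds:
    def job_function():
        print "Hello World"

    sched.add_interval_job(job_function, seconds=5)
    sched.start()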
Example #20
import datetime
import time
from apscheduler.scheduler import Scheduler

#Start the scheduler
sched = Scheduler()
sched.daemonic = False
sched.start()


def job_function():
    print "hello world"
    print(datetime.datetime.now())
    time.sleep(20)


#Schedules job_function to be run once each minute
sched.add_cron_job(job_function, minute='0-59')
Example #21
File: cron.py Project: de1o/MoePad
def day_send():
    day_send_sched = Scheduler()
    day_send_sched.daemonic = False
    day_send_sched.add_cron_job(send, hour='7-23', minute='10,30,50', second=0)
    day_send_sched.start()