def run(self):
    """Service main loop: load the job configuration, instantiate the
    configured job class, and run it on a blocking interval scheduler.

    Fix: previously a failed ``Registry().get(...)`` was logged and execution
    fell through with ``config`` unbound, raising NameError on the next line.
    Now we back off and retry the configuration load.

    NOTE(review): ``scheduler.start()`` blocks, so the while-loop only
    re-iterates when configuration loading fails.
    """
    import time  # local import: only needed for the retry back-off

    setup_logging()
    log = logging.getLogger('hermes_cms.service.runner')
    while True:
        try:
            config = Registry().get(self.config_file)
        # pylint: disable=broad-except
        except Exception as e:
            log.exception(e)
            time.sleep(5)
            continue
        job_cfg = config['jobs'][self.name]
        mod = __import__(job_cfg['module_name'], fromlist=[job_cfg['class_name']])
        service_class = getattr(mod, job_cfg['class_name'])
        job_class = service_class(self.name, self.region, config)
        seconds = int(job_cfg['frequency'])
        scheduler = BlockingScheduler()
        scheduler.add_job(job_class.do_action, IntervalTrigger(seconds=seconds))
        log.info('Starting Scheduled job %s', self.name)
        scheduler.start()
def test5():
    """Run a job once at a fixed date using the 'date' trigger.

    Fix: the original also passed ``seconds=1``; DateTrigger only accepts
    ``run_date``/``timezone``, so add_job raised a TypeError.
    """
    sched = BlockingScheduler()
    # add_job's second argument is the trigger: 'date', 'interval' or 'cron'.
    sched.add_job(my_job, 'date', run_date=datetime(2016, 8, 16, 12, 34, 5),
                  args=('123',), id='my_job_id')
    sched.start()
    print('定时任务')
def test1():
    """Run my_job every second on a blocking scheduler.

    Fix: removed the unused ``start_time`` local.
    """
    sched = BlockingScheduler()
    # Run my_job every 1 second; args are forwarded to my_job; id is optional.
    sched.add_job(my_job, 'interval', args=('123',), seconds=1, id='my_job_id')
    # start() blocks here until the process is interrupted (Ctrl+C).
    sched.start()
    print('运行不到这里')
def main(argv):
    """Entry point: run ``link`` once an hour on a blocking scheduler."""
    if len(argv) > 1:
        # initialize some variables
        pass
    runner = BlockingScheduler()
    runner.add_job(link, "interval", hours=1, id="link_job")
    runner.start()
def task_schedule(self):
    """Poll for tasks every 30 seconds; report any scheduler error."""
    sched = BlockingScheduler()
    try:
        sched.add_job(self._get_task, 'interval', seconds=30)
        sched.start()
    except Exception as e:
        print(e)
class BGPTableDownload(basesinfonierspout.BaseSinfonierSpout):
    """Sinfonier spout that periodically downloads the potaroo BGP table.

    Fix: ``__init__`` previously did
    ``basesinfonierspout.BaseSinfonierSpout().__init__()`` -- creating a
    *new* base instance and initializing that, leaving ``self``
    uninitialized. It now initializes the base class on ``self``.
    """

    def __init__(self):
        basesinfonierspout.BaseSinfonierSpout.__init__(self)

    def useropen(self):
        """Schedule self.job every `frequency` seconds; start() blocks."""
        self.interval = int(self.getParam("frequency"))
        self.sched = BlockingScheduler()
        self.sched.add_job(self.job, "interval", seconds=self.interval, id="bgptable")
        self.sched.start()

    def usernextTuple(self):
        pass

    def job(self):
        """Fetch the BGP table dump and emit downstream.

        NOTE(review): the response body is fetched but never used -- confirm
        whether emit() is supposed to carry the payload.
        """
        query = "http://bgp.potaroo.net/v6/as2.0/bgptable.txt"
        self.log(query)
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:20.0) Gecko/20100101 Firefox/20.0"
        }
        r = requests.get(query, headers=headers)
        self.emit()
class CrawlScheduler(object):
    """Runs an Instagram crawler every 15 minutes, aligned to quarter hours.

    NOTE: this is Python 2 code (it uses the ``long`` builtin).
    """
    def __init__(self, crawler):
        # crawler: object exposing crawl(), get_crawl_log(),
        # get_redis_end_time_key() and get_redis_start_time_key().
        self.crawler = crawler
        self.scheduler = BlockingScheduler()

    def start(self):
        """Run one crawl pass and log how long it took."""
        logging.info('=============================================')
        logging.info('[{0}] Start crawling from Instagram...'.format(datetime.datetime.now()))
        crawling_start_time = time.time()
        self.crawler.crawl()
        crawling_end_time = time.time()
        time_spent = int(crawling_end_time - crawling_start_time)
        logging.info('Time spent: {0}min {1}s'.format(time_spent / 60, time_spent % 60))
        logging.info('=============================================')

    @staticmethod
    def get_nearest_start_time():
        """Return the next quarter-hour boundary as a datetime."""
        # Round the current epoch time up to the next multiple of 15 minutes.
        nearest_start_timestamp = long(time.time() / (60 * 15) + 1) * 60 * 15
        return datetime.datetime.fromtimestamp(nearest_start_timestamp)

    def start_scheduler(self, should_continue=False):
        """Start crawling every 15 minutes from the next quarter-hour.

        :param should_continue: when True, keep the previously stored start
            marker in redis instead of rewinding it by one interval.
        """
        # Config logging and alarm.
        logging.basicConfig(filename=self.crawler.get_crawl_log(), level=logging.DEBUG)
        scheduler_start_time = self.get_nearest_start_time()
        redis_client.set(self.crawler.get_redis_end_time_key(), str(scheduler_start_time))
        if not should_continue:
            # Rewind the start marker by (almost) one full interval so the
            # first run covers the preceding 15-minute window.
            redis_client.set(self.crawler.get_redis_start_time_key(), str(scheduler_start_time - datetime.timedelta(minutes=14, seconds=59)))
        self.scheduler.add_job(self.start, 'interval', start_date=scheduler_start_time, minutes=15, misfire_grace_time=600)
        self.scheduler.start()
class PeriodGather(object):
    """Gathers stock basic and history data every weekday at 16:30."""

    def __init__(self, db):
        self.__db = db
        self.__scheduler = BlockingScheduler()
        self.__scheduler.add_job(self.gather, 'cron',
                                 day_of_week='mon-fri', hour=16, minute=30)

    def start(self):
        """Block the calling thread and run the schedule."""
        self.__scheduler.start()

    def gather(self):
        """Collect stock basics first, then per-stock history data."""
        _logger.info('period gather stock basic and history data, begin.....')
        try:
            StockBasicCollector(self.__db).collect()
            for code in self.__get_stock_list():
                HistDataCollector(code, self.__db).collect()
        except Exception as e:
            _logger.exception(e)
        _logger.info('period gather stock basic and history data, end.....')

    def __get_stock_list(self):
        """Return all stock codes from the basics collection."""
        collection = Collection(Constants.BASIC_COLLECTION, self.__db)
        return [info['code'] for info in collection.find()]

    def stop(self):
        """Shut the scheduler down if one exists."""
        if self.__scheduler:
            self.__scheduler.shutdown()
class PeriodicRetrievalManager(RetrievalManager):
    """ Manages the periodic retrieval of updates. """
    def __init__(self, retrieval_period: TimeDeltaInSecondsT, update_mapper: UpdateMapper,
                 logger: Logger=PythonLoggingLogger()):
        """
        Constructor.
        :param retrieval_period: the period that dictates the frequency at which data is retrieved
        :param update_mapper: the object through which updates can be retrieved from the source
        :param logger: log recorder
        """
        # NOTE(review): `PythonLoggingLogger()` as a default argument is
        # evaluated once at definition time and shared across instances --
        # confirm the logger is stateless/shareable.
        super().__init__(update_mapper, logger)
        self._retrieval_period = retrieval_period
        self._running = False
        self._state_lock = Lock()   # guards transitions of `_running`
        self._updates_since = None  # type: datetime
        self._scheduler = BlockingScheduler()
        # coalesce + max_instances=1 prevent overlapping or bunched-up
        # retrievals; next_run_time=now makes the first retrieval immediate.
        self._scheduler.add_job(self._do_periodic_retrieval, "interval", seconds=self._retrieval_period,
                                coalesce=True, max_instances=1, next_run_time=datetime.now())

    def run(self, updates_since: datetime=datetime.min):
        """Run the scheduler on the calling thread (blocks until stopped)."""
        self._updates_since = localise_to_utc(updates_since)
        with self._state_lock:
            if self._running:
                raise RuntimeError("Already running")
            self._running = True
        self._scheduler.start()

    def start(self, updates_since: datetime=datetime.min):
        """
        Starts the periodic retriever in a new thread. Cannot start if already running.
        :param updates_since: the time from which to get updates from (defaults to getting all updates).
        """
        Thread(target=self.run, args=(updates_since, )).start()

    def stop(self):
        """
        Stops the periodic retriever.
        """
        with self._state_lock:
            if self._running:
                # wait=False: do not block for in-flight retrieval jobs.
                self._scheduler.shutdown(wait=False)
                self._running = False
                logging.debug("Stopped periodic retrieval manger")

    def _do_periodic_retrieval(self):
        # Scheduler job body: fetch everything since the watermark and
        # advance the watermark only when something new arrived.
        assert self._updates_since is not None
        updates = self._do_retrieval(self._updates_since)
        if len(updates) > 0:
            # Next time, get all updates since the most recent that was received last time
            self._updates_since = updates.get_most_recent()[0].timestamp
        else:
            # Get all updates since same time in future (not going to move since time forward to simplify things - there
            # is no risk of getting duplicates as no updates in range queried previously). Therefore not changing
            # `self._updates_since`.
            pass
def run():
    """Run main.run every day at 07:00, 11:00 and 17:00; Ctrl+C exits quietly."""
    scheduler = BlockingScheduler()
    scheduler.add_job(main.run, "cron", hour="7,11,17")
    try:
        scheduler.start()
    except KeyboardInterrupt:
        pass
def task_schedule(self):
    """Run self._get_task daily at the configured (hour, minute, second)."""
    sched = BlockingScheduler()
    hour, minute, second = self.sche_time[0], self.sche_time[1], self.sche_time[2]
    try:
        sched.add_job(self._get_task, 'cron', day='1-31',
                      hour=hour, minute=minute, second=second)
        sched.start()
    except Exception as e:
        print(e)
def main():
    """Run tick() every ten seconds on a UTC blocking scheduler."""
    sched = BlockingScheduler(timezone=utc)
    sched.add_job(tick, 'interval', seconds=10)
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        pass
def cronjob(): scheduler = BlockingScheduler() print "*******" scheduler.add_job(checkupdate,'cron', second='0', hour='2',minute='0') try: scheduler.start() except (KeyboardInterrupt, SystemExit): scheduler.shutdown()
def startCrawlerTask():
    """Run crawlerTask daily at 08:15:00 until interrupted."""
    from apscheduler.schedulers.blocking import BlockingScheduler
    sched = BlockingScheduler()
    sched.add_job(crawlerTask, 'cron', second='0', minute='15', hour='8')
    print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C'))
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        sched.shutdown()
def run(self):
    """Register each cron job with its own trigger, then block on the scheduler."""
    sched = BlockingScheduler()
    for job in (NotificationJob(), TransferJob()):
        sched.add_job(job.run, **job.trigger())
    logger.info('running CronJobTaskRunner')
    sched.start()
def main():
    """Kick off the script every 60 seconds until interrupted."""
    sched = BlockingScheduler()
    sched.add_job(kick_off_script, 'interval', seconds=60)
    print('Press Ctrl+C to exit')
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        pass
def main(): sched = BlockingScheduler() sched.add_job(spider.spider(), 'interval', seconds=21600) print 'Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C') try: sched.start() except (KeyboardInterrupt, SystemExit): pass
def main(): ceilometer = create_ceilomenter_client() resources = ceilometer.resources.list() for i in resources: # print '\n' print i.resource_id # Run this job in certian time, with parameter 'text' sched = BlockingScheduler() sched.add_job(my_job, 'interval', seconds=5, args=['test']) sched.start()
def test7():
    """Run tick every second until Ctrl+C (or SystemExit) stops the scheduler."""
    sched = BlockingScheduler()
    sched.add_job(tick, 'interval', seconds=1)
    print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C'))
    try:
        # paused=False starts processing immediately; with paused=True the
        # scheduler would start without running any jobs.
        sched.start(paused=False)
    except (KeyboardInterrupt, SystemExit):
        pass
    print('ok')
def main():
    """Schedule the GitHub empty-PR maintenance jobs every 5 minutes."""
    handler = GitHubReposHandler(['start-jsk/jsk_apc'])
    sched = BlockingScheduler(logger=logger)
    for task in (handler.send_empty_pr, handler.close_ci_success_empty_pr):
        sched.add_job(task, trigger='interval', minutes=5)
    sched.print_jobs()
    sched.start()
def main(generate_once, minutes):
    """Generate once; unless ``generate_once`` is set, keep regenerating.

    Fix: the interval was hard-coded to 1 minute even though both the printed
    message and the ``minutes`` parameter promised otherwise; the job now
    honours ``minutes``.
    """
    generate()
    if not generate_once:
        print('Starting schedule, every {} minutes'.format(minutes))
        scheduler = BlockingScheduler()
        scheduler.add_job(generate, 'interval', minutes=minutes)
        try:
            scheduler.start()
        except (KeyboardInterrupt, SystemExit):
            pass
def main():
    """Register one cron job per configured script and run the scheduler.

    Python 2 code. NOTE(review): the entire config dict ``v`` is passed as
    ``kwargs`` to the scheduled method -- presumably each target method
    accepts **kwargs; confirm against the script classes.
    """
    print arrScripts
    logging.basicConfig()
    objSche = BlockingScheduler()
    for (k, v) in arrScripts.items():
        #ThreadNum(v)
        # Instantiate the configured class and look up the bound method to run.
        o = getObj(v['classname'])
        f = getattr(o(), v['method'])
        objSche.add_job(f, 'cron', month=v['cron_month'], day=v['cron_day'], hour=v['cron_hour'], minute=v['cron_minute'], kwargs=v)
    objSche.start()
def scheduler(self):
    """Runs the daemon scheduler: record pid, set process title, tick every minute."""
    self.write_pid(str(os.getpid()))
    if self.setproctitle:
        import setproctitle
        setproctitle.setproctitle('mymongo_scheduler')
    blocking = BlockingScheduler()
    try:
        blocking.add_job(self.dummy_sched, 'interval', minutes=1)
        blocking.start()
    except Exception as e:
        self.logger.error('Cannot start scheduler. Error: ' + str(e))
def blocking_schedule():
    """Demo: print a timestamp every 3 seconds with a blocking scheduler."""
    from apscheduler.schedulers.blocking import BlockingScheduler

    def tick():
        print('Tick! The time is: %s' % datetime.now())

    sched = BlockingScheduler()
    sched.add_job(tick, 'interval', seconds=3)
    print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C'))
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        pass
class AnalysisTask(object):
    """Periodically analyses a list of cells by moving a stage to each one.

    The BlockingScheduler runs inside a worker thread so the parent stays
    responsive. NOTE(review): loop/branch nesting reconstructed from a
    collapsed source -- confirm against the original file.
    """
    def __init__(self,parent,cellList,interval_min=1):
        # cellList is a list of CellItem
        super(AnalysisTask, self).__init__()
        self.parent = parent
        self.cellList = cellList
        self.stage = parent.stage
        self.interval_min = interval_min
        self.taskOn = True                 # cooperative stop flag read by analyseList
        self.scheduler = BlockingScheduler()

    def start(self):
        """Run the analysis job loop in a background thread."""
        self.t = threading.Thread(target=self.analyseJob,args=(None,))
        self.t.start()

    def stop(self):
        """Request the task to stop and shut the scheduler down."""
        self.taskOn = False
        if self.scheduler.running:
            self.scheduler.shutdown()
            print("Scheduler stopped")
        # self.t.join()

    def analyseJob(self,s):
        """Thread body: analyse immediately, then every interval_min minutes."""
        self.scheduler.add_job(self.analyseList, 'interval', minutes=self.interval_min)
        self.analyseList()
        # start() blocks this worker thread until shutdown() is called.
        self.scheduler.start()

    def analyseList(self):
        """Analyse every cell in cellList; abort early if the task was stopped."""
        analysis_start_time = time.time()
        print("Starting cell analysis")
        for cellItem in self.cellList:
            self.analyseCell(cellItem)
            if not self.taskOn:
                print("AnalysisTask stopped")
                return
        now = time.time()
        next_analysis_time = analysis_start_time + 60*self.interval_min
        waiting_time = next_analysis_time - now
        next_analysis_time_str = time.strftime("%d %b %Y %H:%M:%S",time.localtime(next_analysis_time))
        print("Next analysis in {} min (on {})".format(waiting_time/60,next_analysis_time_str))

    def analyseCell(self,cellItem):
        """Move the stage to the cell position and save its data."""
        print("Going to cell {}".format(cellItem.cellName))
        self.stage.moveAbsolute(cellItem.position,wait=True)
        assert self.stage.position == cellItem.position
        self.parent.saveData(cellItem.cellName)
class TalosCollectorCron(object): """docstring for talosCollectorCron""" def __init__(self, redis_conn,config_path): super(TalosCollectorCron, self).__init__() self.redis_conn = redis_conn self.config_path = config_path self.scheduler = BlockingScheduler() json_config = open(config_path, 'r') # print json_config.read() self.jsons = json.loads(json_config.read()) def myjob(self,c): # 获取日期 # 获取HOST信息 # analyzer if c['enable']: tmp = os.popen(c['cmd']).readlines() data = {} data['content'] = tmp data['analyzer'] = c['analyzer'] data['host'] = socket.gethostname() data['date'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') data['param'] = c['param'] r.publish('talos:q:cmd',json.dumps(data)) def start(self): for c in self.jsons: cron = c['time'].split(' ') job = self.scheduler.add_job(self.myjob,args=[c],trigger='cron', year=cron[5], month=cron[4], day=cron[3], hour=cron[2], minute=cron[1], second=cron[0]) print 'TalosCollectorCron Start..' self.scheduler.start()
def start_blocking_scheduler():
    """
    Executes 'synchronize_job' as a synchronous job every hour.
    Console normal entry point.

    Fix: dropped the unused job-handle locals returned by add_job.
    """
    init_logger()
    logger.info('SYNCHRONIZING PROCESS... starting blocking scheduler')
    sched = BlockingScheduler()
    # Hourly partial update at minute 0.
    sched.add_job(scripts_async_execution, 'cron', minute="0",
                  args=[[UPDATING_1H_PROCESSES, getPastHour]])
    # Full daily update at 00:30.
    sched.add_job(scripts_async_execution, 'cron', hour="0", minute="30",
                  args=[[FULL_UPDATING_1D_PROCESSES]])
    # Full weekly update on day 6 at 02:30.
    sched.add_job(scripts_async_execution, 'cron', day=6, hour="2", minute="30",
                  args=[[FULL_UPDATING_1W_PROCESSES]])
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        logger.info('Stopping blocking scheduler')
class RepeatJob:
    """Wraps a BlockingScheduler and runs registered jobs repeatedly.

    Fix: the per-method descriptions were bare strings placed *between*
    methods (plain expression statements that document nothing); they are now
    real docstrings inside the methods they describe.
    """

    def __init__(self):
        self.scheduler = BlockingScheduler()
        print("scheduler init...")

    def add_job(self, job, typ, seconds):
        """Register ``job`` with trigger type ``typ`` and period ``seconds``.

        NOTE(review): ``seconds=`` only makes sense for the 'interval'
        trigger; 'date'/'cron' triggers would reject it -- confirm callers.
        """
        self.scheduler.add_job(job, typ, seconds=seconds)
        print("added interval job: ", job)

    def start(self):
        """Start the BlockingScheduler (blocks the calling thread)."""
        self.scheduler.start()
def startschedule():
    """Run the test/travel/index/content jobs every 3 seconds until interrupted."""
    sched = BlockingScheduler()
    for task in (test, dotraveljobs, doindexjobs, docontentjobs):
        sched.add_job(task, 'cron', second='*/3', hour='*')
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        sched.shutdown()
def main(*args, **kwargs):
    """Load miner config from YAML and schedule throttle/cron jobs per miner.

    The config path comes from argv[1] (falling back to
    DEFAULT_CONFIG_FILENAME); exits with status 1 on a missing file or a
    missing 'defaults'/'miners' section.
    """
    config_filename = sys.argv[1] if len(sys.argv) > 1 else DEFAULT_CONFIG_FILENAME
    config = DEFAULT_CONFIG.copy()
    miners = []
    try:
        # SECURITY NOTE(review): yaml.load without an explicit Loader can
        # execute arbitrary tags on untrusted input -- prefer yaml.safe_load
        # if this file is not fully trusted. (Left unchanged here.)
        config_file = yaml.load(open(config_filename))
        config.update(config_file['defaults'])
        miners.extend(config_file['miners'])
    except FileNotFoundError:
        print('Config file \'{}\' was not found.'.format(config_filename))
        exit(1)
    except KeyError as e:
        print('Config did not contain section {}.'.format(e))
        exit(1)
    # print(config)
    # print(miners)
    # coalesce: fold missed runs into one; job errors go to `listener`.
    scheduler = BlockingScheduler(job_defaults={'coalesce': True})
    scheduler.add_listener(listener, EVENT_JOB_ERROR)
    for idx, miner in enumerate(miners):
        schedules = miner.pop('schedule', [])
        device = Antminer(**miner)
        job_config = merge_dicts(config, {'jobs': [], 'idx': idx})
        # Stagger each miner's first run by 0.2 s per index to avoid a
        # thundering herd at startup.
        job = scheduler.add_job(throttle, 'interval', args=(device,), kwargs=job_config,
                                misfire_grace_time=30, seconds=config['refresh_time'],
                                next_run_time=datetime.datetime.now() + datetime.timedelta(seconds=idx * 0.2))
        job_config['jobs'].append(job)
        for schedule in schedules:
            print(schedule)
            # Split cron-trigger fields out of the schedule entry; the
            # remaining keys ('command', 'value', ...) are the job payload.
            trigger_args = {k: schedule.pop(k) for k in schedule.copy()
                            if k in ['year', 'month', 'day', 'week', 'day_of_week', 'hour',
                                     'minute', 'second', 'start_date', 'end_date']}
            print(trigger_args)
            job = scheduler.add_job(do_thing, 'cron', args=(device, schedule['command'], schedule['value'],),
                                    kwargs=job_config, **trigger_args)
            job_config['jobs'].append(job)
    try:
        scheduler.start()
    except (KeyboardInterrupt, SystemExit):
        pass
{'USER_AGENT': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'}) crawl.crawl(ProvinceSpider) crawl.start() crawl.stop() def run_crawl(path=None): p = Process(target=_crawl, args=['hahahahha']) p.start() #p.join() scheduler = BlockingScheduler(daemon=True) scheduler.add_job(run_crawl, "cron", hour=8, minute=30, timezone='Asia/Shanghai') scheduler.add_job(run_crawl, "cron", hour=12, minute=30, timezone='Asia/Shanghai') scheduler.add_job(run_crawl, "cron", hour=18, minute=30, timezone='Asia/Shanghai') try: scheduler.start()
except Exception: time.sleep(0.5) continue if r.status_code != 200: time.sleep(0.5) continue r_info = r.json() r.close() return r_info def printPrice(): select = ['fil6zqc', 'omgqc'] url = 'http://api.zb.live/data/v1/allTicker' for item in select: temp = url + "?market=" + item x = get(temp) str = '' for key in x: if key in select: str += '[' + key + '-' + x[key]['last'] + ']\n' text = '测试:' + str + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) dingding.dingmessage(text=text) if __name__ == '__main__': scheduler = BlockingScheduler() scheduler.add_job(job, 'interval', seconds=60) scheduler.start()
product=product, currentPrice=current_price) if previous_price != current_price: history = PriceHistory(currentPrice=current_price, product_id=product.id) product.price = current_price db.session.add(product) db.session.add(history) try: db.session.commit() except Exception as e: print(e) alert_history = AlertHistory(currentPrice=current_price, wishlist_id=alert.id) alert.currentPrice = current_price db.session.add(alert_history) db.session.add(alert) try: db.session.commit() except Exception as e: print(e) job = scheduler.add_job(scheduled_job, 'interval', minutes=120, id='price_alerts_cron', replace_existing=True) scheduler.start()
# Scheduled crawler: run the smzdm spider once a day at 08:12.
# Fix: the original used ``hour=08`` -- a leading-zero integer literal is a
# SyntaxError in Python -- it is now hour=8.
from apscheduler.schedulers.blocking import BlockingScheduler
from datetime import datetime
import os


def job():
    """Print the current time and launch the scrapy crawl."""
    print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    os.system('cd /Users/zhl/Python001-class01/week10/cellphone && scrapy crawl smzdm')


# Run daily at 08:12.
scheduler = BlockingScheduler()
scheduler.add_job(job, 'cron', day_of_week='0-6', hour=8, minute=12)
scheduler.start()
"""APScheduler demo: periodically insert a row into testing.db and greet."""
from apscheduler.schedulers.blocking import BlockingScheduler
import sqlite3 as lite


def insert():
    """Insert one row into testing.db's ``test`` table (table must exist)."""
    con = lite.connect("testing.db")
    cur = con.cursor()
    cur.execute("INSERT INTO test (user) VALUES ('test')")
    con.commit()
    con.close()


def testing():
    """Print a greeting."""
    print("Hello world")


scheduler = BlockingScheduler()
# Intervals expressed in hours: ~1 minute and ~3.6 seconds respectively.
scheduler.add_job(testing, 'interval', hours=0.0166666666)
scheduler.add_job(insert, 'interval', hours=0.001)
scheduler.start()
def dojob():
    """Schedule fun_c daily at 22:MM, where MM is drawn randomly from 34-37."""
    sched = BlockingScheduler()
    minute = random.randint(34, 37)
    sched.add_job(fun_c, 'cron', hour=22, minute=minute)
    sched.start()
from Test.MACD_Timer import macd_test_daily


def MACD_Report():
    """Run the daily MACD test report."""
    macd_test_daily()


def update_k():
    """Refresh K-line data, commit it, and send the summary e-mail."""
    update_K_data()
    conn_k.commit()
    send_basic_email()


# The next few lines are for manual one-off execution; remember to comment
# them out when running via the scheduler (otherwise they also run
# immediately at import time).
macd_test_daily()
update_K_data()
conn_k.commit()
send_basic_email()

sched = BlockingScheduler()
# Mon-Sat: report at 05:00, K-data update at 06:30.
sched.add_job(func=MACD_Report, trigger='cron', day_of_week='mon-sat', hour=5, minute=0)
sched.add_job(func=update_k, trigger='cron', day_of_week='mon-sat', hour=6, minute=30)
sched.start()
print 'run auto it control...' command_autoit = "autoit3 C:\\DaAn\\AutoIT_Scripts\\RTControl.au3" os.system(command_autoit) def error_report(): print 'run auto it error_report...' command_autoit = "autoit3 C:\\DaAn\\AutoIT_Scripts\\ErrorReport.au3" os.system(command_autoit) def processChecker(): command_remote = 'pslist \\\\10.10.10.100 -u bmuser -p bmuser HRSTART' #if os.system(command_remote) == 0: if 1: try: print 'apscheduler run:' print time.localtime(time.time()) processKiller() processCopier() reagent() calibation() control() error_report() except Exception as ex: print ex processChecker() processPart1() sched = BlockingScheduler() sched.add_job(processChecker, 'interval', seconds = 200) sched.start()
def my_scheduler(runtime):
    """Run ``my_job`` repeatedly, once every ``runtime`` seconds (blocks)."""
    scheduler = BlockingScheduler()
    scheduler.add_job(my_job, 'interval', seconds=runtime)
    scheduler.start()
from apscheduler.schedulers.blocking import BlockingScheduler
from Script.Util.database import iterate_for_database
from Script.Util.system_info import get_cpu_info, get_ram_info, get_disk_info, get_network_info, \
    get_system_info, get_mac_address


def main():
    '''
    Main function of application.
    Gathers CPU, RAM, disk, network and system information plus the MAC
    address, then hands everything to the database writer.
    :return: None
    '''
    metrics = [
        get_cpu_info(),
        get_ram_info(),
        get_disk_info(),
        get_network_info(),
        get_system_info(),
    ]
    iterate_for_database(metrics, get_mac_address())


if __name__ == '__main__':
    main()
    # A 10-second interval leaves enough time to push each batch to influxdb.
    scheduler = BlockingScheduler()
    scheduler.add_job(main, 'interval', seconds=10)
    scheduler.start()
mail_helper = MailHelper() try: mail_helper.send_mail_to(dest='*****@*****.**', message=today) except Exception, e: logging.error(e) everyday_com_cashflow = EverydayComCashflow() everyday_com_cashflow.work() if __name__ == '__main__': logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s', datefmt='%a, %d %b %Y %H:%M:%S', filename=LOG_FILENAME, filemode='w') uri = mysqlUri() spider = Spider() scheduler = BlockingScheduler() # scheduler.add_job(spider.work, 'date', run_date=datetime.datetime.now()) scheduler.add_job(spider.work, 'cron', day_of_week='0-6', hour='9,12,15,18', minute=33, end_date='2018-08-01') try: scheduler.start() except (KeyboardInterrupt, SystemExit): scheduler.shutdown()
'IP': ips[i], 'Port': ports[i] }) pool = ThreadPool() pool.map(proxier.check, addr) pool.close() pool.join() try: conn = sqlite3.connect("/root/proxy.db") curs = conn.cursor() curs.execute(SCHEMA) curs.execute("delete from 'proxy'") conn.commit() for addr in proxier.get(): curs.execute("insert into proxy (ip,port) values (?,?)", [addr['IP'], addr['Port']]) conn.commit() curs.close() conn.close() except sqlite3.Error: pass if __name__ == "__main__": scheduler = BlockingScheduler() scheduler.add_job(save, 'interval', hours=1) try: scheduler.start() except (KeyboardInterrupt, SystemExit): print KeyboardInterrupt print SystemExit
if page == '0': f.close() print(f'{word}关键字已爬完') return page = skb_func(phone, passwd, word, int(page)) f.seek(0) new_data = word + ' ' + str(page) + '\n' word_lists[line] = new_data f.writelines(word_lists) f.close() def run(): print('----------开始爬取----------') start_spider('13514987518', 'jsn95279527', '医疗美容', 0) start_spider('13155291086', 'jsn952727', '证券', 1) start_spider('18326601878', 'jsn95279527', '教育', 2) start_spider('18551107173', 'jsn95279527', '餐饮加盟', 3) start_spider('18856895487', 'jsn95279527', '呼叫中心', 4) start_spider('17621862011', 'jsn95279527', '房产买卖', 5) print('----------结束爬取----------') if __name__ == '__main__': scheduler = BlockingScheduler() scheduler.add_job(func=run, trigger='cron', hour='19', minute='0', second='10') scheduler.start()
# run.py
# --------------------------------------------------------------------------
# Main program of the HODL-bot application: execute this script to start the
# bot. It simply runs the trader script on a fixed interval.
# --------------------------------------------------------------------------

# import components
from apscheduler.schedulers.blocking import BlockingScheduler
import config
from os import system


def run_rebal():
    """Execute the trader script (the placeholder path must be filled in)."""
    system('/<path to>/trader.py')


# Define and start the job scheduler (blocks forever).
scheduler = BlockingScheduler()
scheduler.add_job(run_rebal, 'interval', minutes=config.interval)
scheduler.start()
def run():
    """Run main() once immediately, then every 10 minutes."""
    main()
    scheduler = BlockingScheduler()
    scheduler.add_job(main, 'interval', minutes=10)  # crawl every 10 minutes
    scheduler.start()
# @click.option('--init', is_flag=True, help='initialize the sqlite database')
# @click.option('--check', is_flag=True, help='check the most real-time threat intelligence')
# @click.option('--display', is_flag=True, help='displays the latest vulnerability list information')
# def main(init, check, display):
#     if init:
#         init_sqlite_db()
#         infos_list = get_tencent_security_info()
#         insert_security_info(infos_list)
#     if check:
#         infos_list = get_tencent_security_info()
#         check_news_to_remind(infos_list)
#     if display:
#         display_tencent_security_info()

if __name__ == '__main__':
    # Initial sync, then run `main` every day at 08:30 (Asia/Shanghai).
    init_sqlite_db()
    insert_security_info(get_tencent_security_info())
    scheduler = BlockingScheduler(timezone="Asia/Shanghai")
    # scheduler.add_job(main, 'interval', seconds=60)
    scheduler.add_job(main, "cron", hour=8, minute=30, second=0)
    try:
        logger.warning('Press Ctrl+C to exit ...')
        scheduler.start()
    except (KeyboardInterrupt, SystemExit):
        logger.warning('Bye bye ...')
        send_msg_mm("@escape", "异常退出请处理", "")
        scheduler.shutdown()
Config.access_token_key, Config.access_token_secret) #Initialize ignorelist ignore_list = IgnoreList("ignorelist") #Initialize scheduler scheduler = BlockingScheduler() #First run RandomTimes() ClearQueue() CheckRateLimit() CheckBlockedUsers() ScanForContests() scheduler.add_job(RandomTimes, 'interval', hours=24) scheduler.add_job(ClearQueue, 'interval', seconds=Config.clear_queue_time) scheduler.add_job(CheckRateLimit, 'interval', seconds=Config.rate_limit_update_time) scheduler.add_job(CheckBlockedUsers, 'interval', seconds=Config.blocked_users_update_time) scheduler.add_job(ScanForContests, 'interval', seconds=Config.scan_update_time) try: scheduler.start() except (KeyboardInterrupt, SystemExit): pass
def printHello():
    """Launch job3 and job4, each in its own separate process."""
    p1 = mp.Process(target=job3)
    p1.start()
    p2 = mp.Process(target=job4)
    p2.start()


def printWelcome():
    print('hello,welcome')


def stillOn():
    print('I am still on')


sched = BlockingScheduler()
# Schedule file format (after the header line):
#   name|dayfield|week-or-month|hour|minute|second
with open('/home/jacky/aiadvisor/learning/multiProcessing/test.txt','r') as f:
    Lines = f.readlines()
    for line in Lines[1:]:
        terms=line.split('|')
        print(terms)
        # SECURITY: eval() on a value read from this file executes arbitrary
        # code if the file is writable by others -- consider a dict mapping
        # names to functions instead. (Left unchanged here.)
        if terms[2]=='week':
            sched.add_job(eval(terms[0]),'cron',day_of_week=terms[1],hour=terms[3],minute=terms[4],second=terms[5])
        if terms[2]=='month':
            sched.add_job(eval(terms[0]),'cron',day=terms[1],hour=terms[3],minute=terms[4],second=terms[5])
# Heartbeat: print once per minute, 08:00-20:59, every day.
sched.add_job(stillOn,'cron',day_of_week='0-6',hour='08-20',minute='*/1',second='00')
sched.start()
# Works on Linux; install with: pip install apscheduler
# https://apscheduler.readthedocs.io/en/v2.1.2/cronschedule.html
from apscheduler.schedulers.blocking import BlockingScheduler
import time


def exec_interval():
    """Runs at a fixed interval."""
    print("hello world")


def exec_cron():
    """Runs on the cron schedule; prints the local time."""
    stamp = time.strftime('%c', time.localtime(time.time()))
    print('cron', stamp)


sched = BlockingScheduler()
# Call exec_interval() every 5 seconds.
sched.add_job(exec_interval, 'interval', seconds=5)
# Cron style: fire at seconds 10 and 30 of every minute.
sched.add_job(exec_cron, 'cron', minute="*", second="10, 30")
sched.start()
mktime(entry.published_parsed)) pub_localized = utc.localize(pub_datetime) pub_eastern = pub_localized.astimezone(est) if not FeedItem.objects.filter( title=entry.title.encode('ascii', 'ignore')): FeedItem.objects.create( feed=feed, title=entry.title.encode('ascii', 'ignore'), url=entry.link, summary=strip_tags(entry.summary.encode('ascii', 'ignore')), pub_date=pub_eastern) @sched.scheduled_job('interval', days=1) def remove_old_feed_items(): for item in FeedItem.objects.all(): if item.pub_date < utc.localize(datetime.datetime.now() - datetime.timedelta( days=1)).astimezone(est): item.delete() # Run jobs immediately on deploy sched.add_job(func=update_newsfeed, trigger=DateTrigger(run_date=datetime.datetime.now())) sched.add_job(func=remove_old_feed_items, trigger=DateTrigger(run_date=datetime.datetime.now())) sched.start()
def tempCheck(): # TODO Update internal IP to reference global (or learn how to enable hairpin NAT on router) temperApiUrl = "http://192.168.0.11:5022/latest" response = requests.get(temperApiUrl) if response.status_code == 200: temp = response.json()[0]["temp"] else: temp = "A TERRIBLE ERROR" upperBound = 45.0 isNumber = (type(temp) == int or float) if isNumber and temp >= upperBound: # TODO Inject key via en varbs alexaTrigger = "https://maker.ifttt.com/trigger/fridge_door_open/with/key/gxpFX4NxZiaBcLHPowQjNuOMYycauE5FiXnJ841cmca" dictToPost = {'value1': temp} response = requests.post(alexaTrigger, json=dictToPost) if response.status_code == 200: print("Success!") else: print("Temperatures are nominal") if __name__ == "__main__": print("Initializing Temper Notification Service...") scheduler = BlockingScheduler() job = scheduler.add_job(tempCheck, 'interval', minutes=5) scheduler.start()
# 拿出该索引的数据,签到 sign_info = add_sign(one.get("id")) # 删除随机索引的数据 del self.lists[randint] time1_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S') add_log(one.get("name"), sign_info, time1_str) print("随机时间{}".format(time1_str)) print(self.name) print(sign_info) # sendEmail("学校签到信息",str(sign_info),str(li.get("qq"))+"@qq.com") def list_split(items, n): return [items[i:i + n] for i in range(0, len(items), n)] def sign(): split = list_split(list(getlist()), 10) for i in range(len(split)): MyThread(split[i]).start() sched = BlockingScheduler() sched.add_job(sign, 'cron', hour=7, minute=30, max_instances=3) print("sched success run") time1_str = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S') print(time1_str) sched.start()
"""Send/receive e-mail once at startup, then on fixed cron schedules."""
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.executors.pool import ProcessPoolExecutor
from sent_email import send
from receive_email import receive

if __name__ == '__main__':
    # One immediate round before entering the schedule.
    send()
    receive()
    executors = {
        'default': {'type': 'threadpool', 'max_workers': 20},
        'processpool': ProcessPoolExecutor(max_workers=5),
    }
    job_defaults = {'coalesce': False, 'max_instances': 3}
    sched = BlockingScheduler()
    sched.configure(executors=executors, job_defaults=job_defaults)
    # send() four times a day; receive() at the top of every hour.
    for hh, mm in ((7, 30), (9, 10), (13, 0), (19, 0)):
        sched.add_job(send, 'cron', hour=hh, minute=mm, misfire_grace_time=200)
    sched.add_job(receive, 'cron', minute=0, misfire_grace_time=200)
    sched.start()
from apscheduler.schedulers.blocking import BlockingScheduler
import os


def app():
    """Run the GameScraper management command inside the venv.

    Fix: the original command used single backslashes in a normal string
    literal; the backslash before ``a`` in ``Scripts`` + ``activate`` formed
    the BEL escape, so the path handed to the shell was corrupted. A raw
    string keeps every backslash intact.
    """
    os.system(r"venv\Scripts\activate & cd GameScraper & python manage.py runcom")


# Run once now, then every day at 22:30.
app()
scheduler = BlockingScheduler()
scheduler.add_job(app, trigger='cron', hour='22', minute='30')
scheduler.start()
# %% [markdown]
# ## Define the runner
# %%
def execute():
    """Fetch the viewer count and append (time, value) to data.xls."""
    res = getViewersCount()
    status = writeInXls({'filename': 'data.xls', 'time': res['time'], 'value': res['value']})
    print(status)

# %%
# Module that supports repeated, scheduled execution
from apscheduler.schedulers.blocking import BlockingScheduler

# %%
# Use a blocking scheduler (a BackgroundScheduler would return control instead)
# scheduler = BackgroundScheduler()
scheduler = BlockingScheduler()
job = scheduler.add_job(execute, 'interval', seconds=3)
scheduler.start()

# %%
# job.remove()
"""Ping a host every 5 seconds and sound an alarm when it is unreachable."""
from apscheduler.schedulers.blocking import BlockingScheduler
import os
from playsound import playsound


def ping():
    """Ping google.com once; play alarm.mp3 if the ping fails."""
    hostname = "google.com"  # replace w ip
    if os.system("ping -c 1 " + hostname) == 0:
        # server is up
        print("1")
    else:
        print("SERVER IS DOWN ALERT ALERT")
        # Choose an mp3 or any sound file to play, accepts web links ( i think)
        playsound('alarm.mp3')


scheduler = BlockingScheduler()
scheduler.add_job(ping, 'interval', seconds=5)
scheduler.start()
# -*- coding: utf-8 -*-
"""Run the feature-computation strategy at minute 1 of every hour."""
from strategy import compute_feature as cmp
import datetime
from logger import hbtrade_logger as log
from apscheduler.schedulers.blocking import BlockingScheduler


def good():
    """Debug helper: print the current UTC time."""
    print(datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))


if __name__ == '__main__':
    # cmp.cmp_features()
    sched = BlockingScheduler()
    sched.add_job(cmp.cmp_features, 'cron', minute='1', hour='*')
    sched.start()
# First insert into the monedas table, so entries are not duplicated.
def obtenerValor1():
    """Fetch the listings payload from the API; print and swallow network errors."""
    session = Session()
    session.headers.update(headers)
    try:
        response = session.get(url, params=parameters)
        return json.loads(response.text)
    except (ConnectionError, Timeout, TooManyRedirects) as e:
        print(e)


temp = obtenerValor1()
for curso in temp['data']:
    # Persist one row per currency (name, symbol, slug, dates, tags).
    models.insertInTableMoneda(
        str(curso['name']),
        str(curso['symbol']),
        str(curso['slug']),
        str(curso['date_added']),
        str(curso['last_updated']),
        str(curso['tags']),
    )

# Refresh every 30 minutes (obtenerValor is defined elsewhere in the project).
scheduler = BlockingScheduler()
scheduler.add_job(obtenerValor, 'interval', minutes=30)
scheduler.start()
"""Minimal demo: fire a cron job at minute 1 of every hour."""
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.cron import CronTrigger


def test():
    """Report that the scheduler fired."""
    print("Scheduler working")


scheduler = BlockingScheduler()
# minute="1" means once per hour, at HH:01.
scheduler.add_job(func=test, trigger=CronTrigger(minute="1"))
print("Clock started")
scheduler.start()
""" Demonstrates how to schedule a job to be run in a process pool on 3 second intervals. """ from datetime import datetime import os from apscheduler.schedulers.blocking import BlockingScheduler def tick(): print('Tick! The time is: %s' % datetime.now()) if __name__ == '__main__': scheduler = BlockingScheduler() scheduler.add_executor('processpool') scheduler.add_job(tick, 'interval', seconds=3) print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C')) try: scheduler.initialize() except (KeyboardInterrupt, SystemExit): pass