def run():
    """Wire up the THS trading client and run the daily follower jobs.

    Blocks in ``scheduler.start()`` until interrupted; on Ctrl-C or
    SystemExit all scheduled jobs are removed before returning.
    """
    # Executor pools: a 10-worker thread pool as the default, plus a
    # 5-worker process pool registered under the 'processpool' alias.
    pool_config = {
        'default': {'type': 'threadpool', 'max_workers': 10},
        'processpool': ProcessPoolExecutor(max_workers=5),
    }
    sched = BlockingScheduler()
    sched.configure(executors=pool_config)

    client = api.use('ths', debug=False)
    # NOTE(review): raw string keeps the doubled/quadrupled backslashes
    # literally in the path — confirm the trading client accepts this form.
    client.connect(r"c:\\workspace\\同花顺\\\\xiadan.exe", timeout=5)
    client.enable_type_keys_for_editor()

    # Weekday jobs: sell just before the open (09:27), buy just after (09:31).
    sched.add_job(join_quant_follower_sell, 'cron',
                  day_of_week='mon-fri', hour=9, minute=27, args=[client])
    sched.add_job(join_quant_follower_buy, 'cron',
                  day_of_week='mon-fri', hour=9, minute=31, args=[client])
    # join_quant_follower_sell(client,session)
    # join_quant_follower_buy(client,session)

    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        sched.remove_all_jobs()
def run():
    """Schedule the twice-daily stock-selection jobs of stock_model_sys_v2.

    Blocks in ``scheduler.start()`` until interrupted; on Ctrl-C or
    SystemExit all scheduled jobs are removed before returning.
    """
    executor_conf = {
        'default': {'type': 'threadpool', 'max_workers': 10},
        'processpool': ProcessPoolExecutor(max_workers=5),
    }
    sched = BlockingScheduler()
    sched.configure(executors=executor_conf)

    # Model input window: the 45 days leading up to today.
    start = (datetime.today() - timedelta(days=45)).strftime('%Y-%m-%d')
    # source_dir = 'data/industry_sw/'
    industry = 'all'
    max_file_count = 1000
    seq_dim = 20
    input_dim = 5
    out_dim = 8

    # Overnight job: compute the trend of all stocks (03:00 on weekdays).
    sched.add_job(
        stock_model_sys_v2.select_best_stock_at_yestoday, 'cron',
        day_of_week='mon-fri', hour=3, minute=0,
        args=[start, industry, max_file_count, seq_dim, input_dim, out_dim])

    # Pre-open job: select the best stock shortly before the market opens.
    sched.add_job(
        stock_model_sys_v2.select_best_stock_after_open, 'cron',
        day_of_week='mon-fri', hour=9, minute=26,
        args=[start, industry, max_file_count, seq_dim, input_dim, out_dim])

    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        sched.remove_all_jobs()
def get_theme_colors(theme_name):
    """Load a JSON theme file and return its 'colors' mapping.

    :param theme_name: path to the theme file on disk
    :return: the value stored under the file's top-level 'colors' key
    :raises KeyError: if the file has no 'colors' entry
    """
    # BUGFIX: pass an explicit encoding — JSON theme files must not be
    # decoded with the platform's locale-dependent default encoding.
    with open(theme_name, "r", encoding="utf-8") as f:
        t = json.load(f)
    return t['colors']


if __name__ == "__main__":
    settings = parse_settings()
    theme = Theme(settings['theme'])
    colors = theme.colors
    # create the bar
    bar = barhandler(theme)

    # Configure scheduler (job times are interpreted in Stockholm local time)
    scheduler = BlockingScheduler()
    scheduler.configure(timezone='Europe/Stockholm')

    # Schedule jobs; next_run_time=now makes each fire immediately
    # instead of waiting out its first interval.
    scheduler.add_job(get_time, 'interval', seconds=30, next_run_time=datetime.now(), args=[colors])
    scheduler.add_job(get_battery, 'interval', seconds=1, next_run_time=datetime.now(), args=[colors])
    scheduler.add_job(get_cpu, 'interval', seconds=5, next_run_time=datetime.now(), args=[colors])
    scheduler.add_job(get_mpd, 'interval', seconds=1, next_run_time=datetime.now(), args=[colors])
    scheduler.add_job(get_volume, 'interval', seconds=1, next_run_time=datetime.now(), args=[colors])
    scheduler.add_job(get_wifi, 'interval', seconds=1, next_run_time=datetime.now(), args=[colors])

    # Start continuous jobs
    bspccontrol = BspcControl(bar)
    Thread(target=bspccontrol.inputhandler, args=(colors,)).start()

    # Start scheduler (blocks until interrupted)
    scheduler.start()
class DisseminationPlayer(object):
    """Replays files listed in xferlog files on an APScheduler timeline.

    Each transfer record is rescheduled relative to a reference time so the
    original time-of-day spacing between transfers is preserved.
    """

    # Time-of-day origin used to compute each record's offset into its day.
    MIDNIGHT = datetime.time(0, 0, 0)

    def __init__(self, top_data_dir, index_file, dir_files_to_parse, files_to_parse, job_func, destination):
        """
        :param top_data_dir: root directory handed to the Indexer
        :param index_file: index file mapping record names to file paths
        :param dir_files_to_parse: directory holding the xferlog files
        :param files_to_parse: iterable of xferlog file names to replay
        :param job_func: callable invoked as job_func(filepath, destination)
        :param destination: destination info (depends on the type of job)
        """
        self._parser = eumetsat.dmon.parsers.xferlog_parser.XferlogParser(no_gems_header=True)
        self._dir_files = dir_files_to_parse
        self._files = files_to_parse
        self._job_func = job_func
        self._scheduler = BlockingScheduler()

        res = []
        t = ftimer(Indexer.load_index, [top_data_dir, index_file], {}, res)
        print("Read index in %d seconds." % (t))
        self._index = res[0]

        # Reference time: jobs start being played this many seconds from now.
        self._defer_time = 5
        self._reference_date = datetime.datetime.now() + datetime.timedelta(seconds=self._defer_time)

        # destination info (depends on the type of job)
        self._destination = destination

    def add_jobs(self):
        """Create one date-triggered job per indexed xferlog record.

        Each record's offset from its day's midnight is added to the
        reference date, preserving the original intra-day spacing.
        """
        for a_file in self._files:
            f_path = "%s/%s" % (self._dir_files, a_file)
            print("Parsing xferlog file %s" % f_path)
            # BUGFIX: use a context manager so each xferlog file handle is
            # closed after parsing instead of leaking until process exit.
            with open(f_path) as fd:
                self._parser.set_lines_to_parse(fd)
                for elem in self._parser:
                    # find file in index
                    filepath = self._index.get(elem['file'], None)
                    if filepath:
                        # offset of this record into its original (UTC) day
                        midnight_date = utc.localize(datetime.datetime.combine(elem['time'].date(), self.MIDNIGHT))
                        time_diff = elem['time'] - midnight_date
                        scheduled_date = self._reference_date + time_diff
                        # create job and schedule it with the time difference
                        # added to the starting reference time
                        d_trigger = DateTrigger(scheduled_date)
                        self._scheduler.add_job(self._job_func, d_trigger, args=[filepath, self._destination])
                    else:
                        print("Could not find %s\n in Index" % (elem['file']))

        print("Player. \n%d jobs scheduled.\n" % (len(self._scheduler.get_jobs())))

    def start(self):
        """Configure the scheduler and block until all replay jobs have run.

        NOTE(review): jobstores/executors/job_defaults come from module
        scope — confirm they are defined before start() is called.
        """
        self._scheduler.configure(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)
        print("Start Scheduler. Jobs will start to be played in %d sec." % self._defer_time)
        self._scheduler.start()
class Monitor ():
    """Polls a lab email inbox and a temperature controller on a schedule.

    Two recurring jobs run on a blocking APScheduler: one fetches unread
    email commands (notifications on/off, temperature readout, report), the
    other reads the temperature and emails an alarm above the maximum.
    """

    def __init__ (self, bay = 2, temperature_ctrl = None, wait_email = 20, wait_T_readout = 30):
        """
        :param bay: bay number, used to build the account name 'bay<N>'
        :param temperature_ctrl: controller exposing get_kelvin(channel=...)
        :param wait_email: inbox polling period, in seconds
        :param wait_T_readout: temperature polling period, in seconds
        """
        self._bay = bay
        self._name = 'bay'+str(bay)
        self._notifications = True
        self._wait_email = wait_email
        self._wait_T_readout = wait_T_readout
        self._offset = 100
        self._pwd = None
        self._temperature_ctrl = temperature_ctrl
        # NOTE(review): _Tctrl duplicates _temperature_ctrl and the readout
        # below uses _temperature_ctrl directly — _Tctrl looks vestigial.
        if temperature_ctrl:
            self._Tctrl = temperature_ctrl
        else:
            self._Tctrl = None
            print ("No temperature controller!")
        self._max_T = 10  # alarm threshold in kelvin
        self._scheduler = BlockingScheduler()
        self._scheduler.configure(timezone='UTC')
        self._scheduler.add_job(self._check_email, 'interval', seconds=self._wait_email)
        self._scheduler.add_job(self._get_temperature, 'interval', seconds=self._wait_T_readout)

    def login (self):
        """Prompt for the mailbox password and open the email session."""
        try:
            print ("Enter password...")
            self._pwd = getpass.getpass()
            self._email = QPLemail.QPLmail(bay=self._bay, password=self._pwd)
        # BUGFIX: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; catch only real errors.
        except Exception:
            print ("Login failed!")

    def set_max_temperature (self, T=10):
        """Set the alarm threshold (kelvin)."""
        self._max_T = T

    def set_channel (self, channel):
        """Select the controller channel used for temperature readout."""
        self._channel = channel

    def _check_email (self):
        """Fetch unread messages and dispatch the command each contains."""
        msg_dict = self._email.fetch_unread()
        for msg in msg_dict:
            body = msg['body'][0].as_string()
            #print (msg)
            sender = msg['mail_from'][0]
            sender_addr = msg['mail_from'][1]
            #print (sender)
            #print (sender_addr)
            if (body.find ('notifications-off')>0):
                self._deactivate(sender_addr)
            elif (body.find ('notifications-on')>0):
                self._activate(sender_addr)
            elif (body.find('get-temperature')>0):
                T = self._get_temperature()
                # here I need to extract the sender email address, not the name
                self._email.send (to=[sender_addr], subject='Temperature readout', message='Current temperature: '+str(self._curr_T)+'K')
            elif (body.find ('send-report')>0):
                self._send_report()
            else:
                print ("None")

    def _send_alarm_email (self):
        """Email the over-temperature alarm to the hard-coded recipients."""
        email_to = ['*****@*****.**', '*****@*****.**']
        #email_to = ['*****@*****.**']
        self._email.send (to=email_to, subject='Help!', message='Current temperature: '+str(self._curr_T)+'K')
        print ("ALARM: temperature = "+str(self._curr_T)+ "K. \nEmail sent to: ")
        print (email_to)

    def _activate(self, sender):
        """Turn alarm notifications on and confirm by email."""
        self._notifications = True
        print ("Notifications activated, as requested by: "+sender)
        self._email.send (to=['*****@*****.**', sender], subject='Settings change', message='Notifications activated, as requested by '+sender)

    def _deactivate(self, sender):
        """Turn alarm notifications off and confirm by email."""
        self._notifications = False
        print ("Notifications de-activated, as requested by: "+sender)
        self._email.send (to=['*****@*****.**', sender], subject='Settings change', message='Notifications de-activated, as requested by '+sender)

    def _get_temperature (self, overrule_notifications=False):
        """Read the current temperature; alarm if above the maximum.

        NOTE(review): overrule_notifications is accepted but never used;
        crashes if no temperature controller was supplied — TODO confirm.
        """
        self._curr_T = self._temperature_ctrl.get_kelvin(channel = self._channel)
        #print ("Read temperature: ", self._curr_T)
        if (self._curr_T>self._max_T):
            if (self._notifications):
                self._send_alarm_email()
        return self._curr_T

    def _send_report (self):
        # Placeholder: reporting not implemented yet.
        pass

    def start (self):
        """Start polling; blocks until Ctrl+C, then shuts the scheduler down."""
        print('Press Ctrl+C to exit')
        try:
            self._scheduler.start()
            while True:
                time.sleep(1)
        except (KeyboardInterrupt, SystemExit):
            # Not strictly necessary if daemonic mode is enabled but should be done if possible
            self._scheduler.shutdown()
class DisseminationPlayer(object):
    """Replays files listed in xferlog files on an APScheduler timeline.

    Each transfer record is rescheduled relative to a reference time so the
    original time-of-day spacing between transfers is preserved.
    """

    # Time-of-day origin used to compute each record's offset into its day.
    MIDNIGHT = datetime.time(0, 0, 0)

    def __init__(self, top_data_dir, index_file, dir_files_to_parse,
                 files_to_parse, job_func, destination):
        """
        :param top_data_dir: root directory handed to the Indexer
        :param index_file: index file mapping record names to file paths
        :param dir_files_to_parse: directory holding the xferlog files
        :param files_to_parse: iterable of xferlog file names to replay
        :param job_func: callable invoked as job_func(filepath, destination)
        :param destination: destination info (depends on the type of job)
        """
        self._parser = eumetsat.dmon.parsers.xferlog_parser.XferlogParser(
            no_gems_header=True)
        self._dir_files = dir_files_to_parse
        self._files = files_to_parse
        self._job_func = job_func
        self._scheduler = BlockingScheduler()

        res = []
        t = ftimer(Indexer.load_index, [top_data_dir, index_file], {}, res)
        print("Read index in %d seconds." % (t))
        self._index = res[0]

        # Reference time: jobs start being played this many seconds from now.
        self._defer_time = 5
        self._reference_date = datetime.datetime.now() + datetime.timedelta(
            seconds=self._defer_time)

        # destination info (depends on the type of job)
        self._destination = destination

    def add_jobs(self):
        """Create one date-triggered job per indexed xferlog record.

        Each record's offset from its day's midnight is added to the
        reference date, preserving the original intra-day spacing.
        """
        for a_file in self._files:
            f_path = "%s/%s" % (self._dir_files, a_file)
            print("Parsing xferlog file %s" % f_path)
            # BUGFIX: use a context manager so each xferlog file handle is
            # closed after parsing instead of leaking until process exit.
            with open(f_path) as fd:
                self._parser.set_lines_to_parse(fd)
                for elem in self._parser:
                    # find file in index
                    filepath = self._index.get(elem['file'], None)
                    if filepath:
                        # offset of this record into its original (UTC) day
                        midnight_date = utc.localize(
                            datetime.datetime.combine(elem['time'].date(),
                                                      self.MIDNIGHT))
                        time_diff = elem['time'] - midnight_date
                        scheduled_date = self._reference_date + time_diff
                        # create job and schedule it with the time difference
                        # added to the starting reference time
                        d_trigger = DateTrigger(scheduled_date)
                        self._scheduler.add_job(self._job_func, d_trigger,
                                                args=[filepath, self._destination])
                    else:
                        print("Could not find %s\n in Index" % (elem['file']))

        print("Player. \n%d jobs scheduled.\n" % (len(self._scheduler.get_jobs())))

    def start(self):
        """Configure the scheduler and block until all replay jobs have run.

        NOTE(review): jobstores/executors/job_defaults come from module
        scope — confirm they are defined before start() is called.
        """
        self._scheduler.configure(jobstores=jobstores, executors=executors,
                                  job_defaults=job_defaults, timezone=utc)
        print("Start Scheduler. Jobs will start to be played in %d sec." % self._defer_time)
        self._scheduler.start()
# BUGFIX: import BlockingScheduler from its canonical module. The original
# `from apscheduler.schedulers.background import BlockingScheduler` only
# worked because background.py happens to import the name transitively —
# fragile against APScheduler refactors.
from apscheduler.schedulers.blocking import BlockingScheduler

from project.jobs.redpacket import rollback_unspent_redpacket

if __name__ == '__main__':
    # Job times are interpreted in Mountain Standard Time.
    scheduler = BlockingScheduler(timezone='MST')
    scheduler.configure()
    # Sweep unspent red packets every 3 seconds until interrupted.
    scheduler.add_job(rollback_unspent_redpacket, 'interval', seconds=3)
    scheduler.start()
""" # run a job print("JOB now starting. FIle path %s" % (file_path)) print("JOB .....") print("JOB now finished") scheduler = BlockingScheduler() # .. do something else here, maybe add jobs etc. the_date = datetime.datetime.now() + datetime.timedelta(seconds=2) d_trigger = DateTrigger(the_date) l = lambda: runnable('/tmtmtmtmtmtmt') scheduler.add_job(func=runnable, trigger=d_trigger, args=['tick\n']) the_date = datetime.datetime.now() + datetime.timedelta(seconds=2) d_trigger = DateTrigger(the_date) scheduler.add_job(func=runnable, trigger=d_trigger, args=['tick1\n']) scheduler.configure(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc) scheduler.start()
def runnable(file_path):
    """Print a start/finish banner for *file_path*, then schedule two more
    runs of itself roughly 2 seconds out on a fresh BlockingScheduler.

    NOTE(review): every invocation creates and starts a new blocking
    scheduler, so this recurses indefinitely until interrupted — it reads
    like a scheduling demo/smoke test rather than production code.

    :param file_path: path echoed in the job banner (never opened here)
    :return: does not return normally; scheduler.start() blocks
    """
    # run a job
    print("JOB now starting. FIle path %s" % (file_path))
    print("JOB .....")
    print("JOB now finished")

    scheduler = BlockingScheduler()

    # Schedule two one-shot re-runs about 2 seconds from now.
    # (BUGFIX: dropped a dead, unused lambda binding from the original.)
    the_date = datetime.datetime.now() + datetime.timedelta(seconds=2)
    d_trigger = DateTrigger(the_date)
    scheduler.add_job(func=runnable, trigger=d_trigger, args=['tick\n'])

    the_date = datetime.datetime.now() + datetime.timedelta(seconds=2)
    d_trigger = DateTrigger(the_date)
    scheduler.add_job(func=runnable, trigger=d_trigger, args=['tick1\n'])

    # NOTE(review): jobstores/executors/job_defaults/utc must exist at
    # module scope for this call to succeed — TODO confirm.
    scheduler.configure(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)
    scheduler.start()
# NOTE(review): this assignment appears to be the tail of a battery-polling
# function whose `def` line falls outside this chunk (compare the fuller
# copy later in this file); `percentage` and `bar` are defined there.
bar.battery = percentage

def getip():
    # Extract this host's source IP from `ip route`; publish the literal
    # string "None" on the bar when no `src <ip>` field is found.
    cmd = subprocess.check_output(('ip', 'route')).decode('ascii')
    # NOTE(review): pattern should ideally be a raw string (r"..."); the
    # `\.` escapes happen to survive unchanged in a normal string here.
    match = re.search("src ([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})", cmd)
    if match:
        bar.ip = match.group(1)
    else:
        bar.ip = "None"

# Configure scheduler (job times interpreted in Amsterdam local time)
scheduler = BlockingScheduler()
scheduler.configure(timezone='Europe/Amsterdam')

# Schedule jobs; next_run_time=now makes each fire immediately instead of
# waiting out its first interval.
scheduler.add_job(getmemory, 'interval', seconds=2, next_run_time=datetime.now())
scheduler.add_job(getcurrenttime, 'interval', seconds=1, next_run_time=datetime.now())
scheduler.add_job(getbattery, 'interval', seconds=10, next_run_time=datetime.now())
scheduler.add_job(getip, 'interval', seconds=10, next_run_time=datetime.now())
class JobLauncher(object):
    """Thin wrapper around an APScheduler scheduler (blocking or background)
    with an SQLite-backed job store and convenience job-management methods.
    """

    def __init__(self, background=False, deamon=True, **kwargs):
        """
        :param background: use a BackgroundScheduler instead of a blocking one
        :param deamon: (sic — kept for caller compatibility) daemon flag
            forwarded to the scheduler
        """
        # BUGFIX: `atefmt` was a typo for `datefmt` — logging.basicConfig()
        # raises ValueError on unrecognised keyword arguments, so the
        # original crashed on construction.
        logging.basicConfig(format="[%(asctime)s] %(message)s",
                            datefmt="%Y-%m-%d %H:%M:%S")
        logging.getLogger('apscheduler').setLevel(logging.DEBUG)

        if background:
            self.sched = BackgroundScheduler(deamon=deamon)  # background
        else:
            self.sched = BlockingScheduler(deamon=deamon)  # foreground

        # TODO: Read from configuration file.
        self.sched.configure(
            jobstores={
                # "sqlite": SQLAlchemyJobStore(url='sqlite:///app/database/example.db'),
                # "default": MemoryJobStore()
                "default": SQLAlchemyJobStore(url='sqlite:///app/database/example.db')
            },
            executors={
                'default': ThreadPoolExecutor(20),
                'processpool': ProcessPoolExecutor(5)
            },
            job_defaults={
                'coalesce': False,
                'max_instances': 3
            },
            timezone=get_localzone()  # Asia/Seoul
        )

        self.retried = 0
        self.logger = logging.getLogger('apscheduler')

        super(JobLauncher, self).__init__()

    def start(self):
        """Start the scheduler unless it is already running; log on exit."""
        try:
            if self.sched.state != STATE_RUNNING:
                self.printJobs(jobstore='default')
                # (dropped unused `started =` binding; start() blocks for
                # a BlockingScheduler and returns None anyway)
                self.sched.start()
        except ConflictingIdError as e:
            traceback.print_exc()
        except KeyboardInterrupt as e:
            traceback.print_exc()
        finally:
            pass
            # Remove all remained store.
            # self.sched.remove_all_jobs()
            # for job in self.getJobs():
            #     if job.pending:
            #         job.pause()
            self.logger.info('Finished')
            self.logger.info(self.getJobs())
            self.printJobs()

    def stop(self, wait=False):
        """Shut the scheduler down if it is running."""
        if self.sched.state == STATE_RUNNING:
            self.sched.shutdown(wait=wait)

    def resume(self):
        """Resume job processing if the scheduler is running."""
        if self.sched.state == STATE_RUNNING:
            self.sched.resume()

    def pause(self):
        """Pause job processing if the scheduler is running."""
        if self.sched.state == STATE_RUNNING:
            self.sched.pause()

    def addListener(self, listener, types):
        """Register an APScheduler event listener for the given event types."""
        self.sched.add_listener(listener, types)

    def addJob(self, job, **kwargs):
        """Build a job via job.build(**kwargs) and add it to the scheduler.

        :return: the scheduled apscheduler Job
        """
        execute, trigger, options = job.build(**kwargs)
        added_job = self.sched.add_job(execute, trigger, **options)
        self.printJobs()
        return added_job

    def getJob(self, job_id):
        """Return the job with *job_id*, or None."""
        return self.sched.get_job(job_id)

    def getJobs(self, jobstore=None):
        """Return all jobs, optionally restricted to one job store."""
        return self.sched.get_jobs(jobstore=jobstore)

    def removeJob(self, job_id, jobstore=None):
        """Remove the job with *job_id*."""
        return self.sched.remove_job(job_id, jobstore=jobstore)

    def removeAllJob(self, jobstore=None):
        """Remove every job, optionally from one job store only."""
        return self.sched.remove_all_jobs(jobstore=jobstore)

    def printJobs(self, jobstore=None, out=None):
        # BUGFIX: forward the caller's `out` stream (was hard-coded to None,
        # silently ignoring the parameter).
        return self.sched.print_jobs(jobstore=jobstore, out=out)

    def getJobState(self, job_id=None, jobstore=None):
        """Summarize job state as [{job_id: {next_run_time, state}}, ...].

        With *job_id*, reports only that job (empty list if not found);
        otherwise reports every job in *jobstore*.
        """
        state = list()

        if job_id is not None:
            job = self.sched.get_job(job_id, jobstore=jobstore)
            if job is not None:
                temp = dict()
                temp[job.id] = {
                    "next_run_time": job.next_run_time,
                    "state": job.pending,
                }
                state.append(temp)
        else:
            for job in self.sched.get_jobs(jobstore=jobstore):
                temp = dict()
                temp[job.id] = {
                    "next_run_time": job.next_run_time,
                    "state": job.pending,
                }
                state.append(temp)

        return state
# NOTE(review): these first lines appear to be the tail of a `getbattery`
# function whose `def` line falls outside this chunk; `bar` is a global
# status-bar object defined elsewhere.
output = subprocess.check_output(('acpi')).decode('ascii')
if 'Battery' in output:
    # presumably the 4th space-separated token is the "NN%," field of
    # acpi's output — verify against the installed acpi version.
    percentage = output.split(' ')[3].replace("%", "").replace(",", "").strip()
    bar.battery = percentage

def getip():
    # Extract this host's source IP from `ip route`; publish the literal
    # string "None" on the bar when no `src <ip>` field is found.
    cmd = subprocess.check_output(('ip', 'route')).decode('ascii')
    # NOTE(review): pattern should ideally be a raw string (r"..."); the
    # `\.` escapes happen to survive unchanged in a normal string here.
    match = re.search("src ([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})", cmd)
    if match:
        bar.ip = match.group(1)
    else:
        bar.ip = "None"

# Configure scheduler (job times interpreted in Amsterdam local time)
scheduler = BlockingScheduler()
scheduler.configure(timezone='Europe/Amsterdam')

# Schedule jobs; next_run_time=now makes each fire immediately instead of
# waiting out its first interval.
scheduler.add_job(getmemory, 'interval', seconds=2, next_run_time=datetime.now())
scheduler.add_job(getcurrenttime, 'interval', seconds=1, next_run_time=datetime.now())
scheduler.add_job(getbattery, 'interval', seconds=10, next_run_time=datetime.now())
scheduler.add_job(getip, 'interval', seconds=10, next_run_time=datetime.now())
# window title polled every 100 ms to track focus changes quickly
scheduler.add_job(getwindowtitle, 'interval', seconds=.1, next_run_time=datetime.now())

# Start continuous jobs
bspccontrol = BspcControl(bar)
Thread(target=bspccontrol.inputhandler).start()

# Start scheduler (blocks until interrupted)
scheduler.start()