Example #1
0
def get_scheduler(blocking=False):
    """Build and return a scheduler instance.

    :param bool blocking: when True return a BlockingScheduler,
        otherwise a BackgroundScheduler.
    :return: the newly created (not yet started) scheduler.
    """
    return BlockingScheduler() if blocking else BackgroundScheduler()
def main():
    """Load settings from the environment and run the ARP-table check
    on a blocking interval scheduler.

    Exits with status 1 when a required environment variable is missing
    or when LOGLEVEL names an unknown logging level.
    """
    try:
        config = {
                'REDIS_URL':  os.environ['REDIS_URL'],
                'ROUTER_HOST': os.environ['ROUTER_HOST'],
                'ROUTER_USER': os.environ['ROUTER_USER'],
                'ROUTER_PASS': os.environ['ROUTER_PASS'],
                'INTERVAL': int(os.environ.get('INTERVAL', 60)),
                'LOGLEVEL': getattr(logging,
                                    os.environ.get('LOGLEVEL', 'WARNING')),
        }
    # BUG FIX: getattr() raises AttributeError for a bogus LOGLEVEL name;
    # the original KeyError-only clause let that escape as a traceback.
    except (KeyError, AttributeError) as exc:
        logging.error("Failed to get configuration from the environment: {}"
                      .format(exc))
        sys.exit(1)

    logging.basicConfig(level=config['LOGLEVEL'])
    # Silence apscheduler's own chatter below WARNING.
    logging.getLogger("apscheduler").setLevel(logging.WARNING)

    scheduler = BlockingScheduler()

    logging.warning("Checking the ARP table on `{}' every {} seconds."
                    .format(config['ROUTER_HOST'], config['INTERVAL']))

    # run(config) fires every INTERVAL seconds; coalesce merges missed runs.
    scheduler.add_job(run, 'interval', [config],
                      seconds=config['INTERVAL'], coalesce=True)
    scheduler.start()
Example #3
0
    def __init__(self, background=False, deamon=True, **kwargs):
        """Configure logging and build the job scheduler.

        :param bool background: run as a BackgroundScheduler when True,
            otherwise as a (foreground) BlockingScheduler.
        :param bool deamon: [sic] forwarded to the scheduler as its
            ``daemon`` option; the misspelled name is kept so existing
            callers keep working.
        """
        # BUG FIX: 'atefmt' was a typo; logging.basicConfig() rejects
        # unknown keyword arguments, so this must be 'datefmt'.
        logging.basicConfig(format="[%(asctime)s] %(message)s",
                            datefmt="%Y-%m-%d %H:%M:%S")
        logging.getLogger('apscheduler').setLevel(logging.DEBUG)

        # BUG FIX: the scheduler option is spelled 'daemon'; the
        # misspelled 'deamon=' key is not a recognized scheduler option.
        if background:
            self.sched = BackgroundScheduler(daemon=deamon)  # background
        else:
            self.sched = BlockingScheduler(daemon=deamon)  # foreground

        # TODO: Read from configuration file.
        self.sched.configure(
            jobstores={
                # "sqlite": SQLAlchemyJobStore(url='sqlite:///app/database/example.db'),
                # "default": MemoryJobStore()
                "default":
                SQLAlchemyJobStore(url='sqlite:///app/database/example.db')
            },
            executors={
                'default': ThreadPoolExecutor(20),
                'processpool': ProcessPoolExecutor(5)
            },
            job_defaults={
                'coalesce': False,
                'max_instances': 3
            },
            timezone=get_localzone()  # Asia/Seoul
        )

        self.retried = 0
        self.logger = logging.getLogger('apscheduler')

        super(JobLauncher, self).__init__()
Example #4
0
def main() -> None:
    """Entry point: parse CLI args, open DB/Kafka, and store messages —
    once with --single-shot, otherwise on an interval scheduler.
    """
    args = docopt(__doc__,
                  version='MSA (store) version ' + __version__,
                  options_first=True)
    if args['--verbose']:
        MSALogger.activate_global_info_logging()
    if args['--log-file']:
        MSALogger.set_logfile(args['--log-file'])

    log.info('Opening database connection...')
    db = MSADataBase(config_file=Defaults.get_db_config())

    if args['--dump-db']:
        log.info('Database has the following entries:')
        for entry in db.dump_table():
            log.info(entry)
        return

    log.info('Opening kafka messaging...')
    kafka = MSAKafka(config_file=Defaults.get_kafka_config())

    # Store once immediately; stop here in single-shot mode.
    store_to_database(kafka.read(), db)
    if args['--single-shot']:
        return

    db_scheduler = BlockingScheduler()
    # BUG FIX: docopt yields option values as strings; the interval must
    # be numeric or apscheduler receives e.g. seconds='45'.
    db_scheduler.add_job(lambda: store_to_database(kafka.read(), db),
                         'interval',
                         seconds=int(args['--update-interval'] or 30))
    db_scheduler.start()
Example #5
0
 def __init__(self):
     """Build the blocking scheduler from the APSCHEDULER_SETTINGS map."""
     settings = APSCHEDULER_SETTINGS
     self.scheduler = BlockingScheduler(
         jobstores=settings['jobstores'],
         executors=settings['executors'],
         job_defaults=settings['job_defaults'],
         timezone=TIMEZONE_PST8PDT)
Example #6
0
    def __init__(self,
                 blocking=True,
                 timezone='UTC',
                 config_path='.',
                 logger_level=None,
                 *args,
                 **kwargs):
        """
        Create ReminderDaemon object.

        :param boolean blocking:
            Determines if Scheduler should be BlockingScheduler or BackgroundScheduler.
        :param str timezone: Timezone for the scheduler to use when scheduling jobs.
        :param str config_path: Path to configuration files.
        :param int logger_level: Level to set logger to.
        """
        self.logger = logging.getLogger(__name__)
        if logger_level:
            self.logger.setLevel(logger_level)
        self.logger.debug('initializing daemon')
        self.scheduler = BlockingScheduler(
            timezone=timezone) if blocking else BackgroundScheduler(
                timezone=timezone)
        self.reminders = []
        self.configs = {}
        self.timezone = timezone
        self._observer = Observer()
        self.config_path = config_path
        # BUG FIX: watchdog's PatternMatchingEventHandler expects a list of
        # patterns; the original passed one semicolon-joined string, which
        # never matches any path.
        self._watchdog_handler = PatternMatchingEventHandler(['*.yaml', '*.yml'])
        self._watchdog_handler.on_created = self.on_created
        # Treat modifications like creations so edited configs reload too.
        self._watchdog_handler.on_modified = self.on_created
        self._watchdog_handler.on_deleted = self.on_deleted
        self._observer.schedule(self._watchdog_handler, self.config_path)
def getJob(fileName='AutoSentChatroom.xlsx', sheetName='Chatfriends'):
    """Read (name, time, text) rows from a spreadsheet next to this file
    and schedule one 'date' job per future row.

    :param str fileName: workbook file name, resolved relative to this module.
    :param str sheetName: sheet holding the rows to schedule.
    :return: the populated (not yet started) BlockingScheduler.
    """
    scheduler = BlockingScheduler()
    workbook = xlrd.open_workbook(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), fileName))
    sheet = workbook.sheet_by_name(sheetName)
    iRows = sheet.nrows
    index = 1
    for i in range(1, iRows):  # row 0 is the header
        textList = sheet.row_values(i)
        name = textList[0]
        context = textList[2]
        float_dateTime = textList[1]
        # Excel stores times as floats; convert to a real datetime.
        date_value = xlrd.xldate_as_tuple(float_dateTime, workbook.datemode)
        date_value = datetime(*date_value[:5])
        if datetime.now() > date_value:
            continue  # skip rows whose send time has already passed
        date_value = date_value.strftime('%Y-%m-%d %H:%M:%S')
        textList[1] = date_value
        scheduler.add_job(SentChatMsg, 'date', run_date=date_value,
                          kwargs={"name": name, "context": context, 'scheduler': scheduler})
        print("任务" + str(index) + ":\n"
              "待发送时间:" + date_value + "\n"
              "待发送到:" + name + "\n"
              "待发送内容:" + context + "\n"
              "******************************************************************************\n")
        index = index + 1
    # BUG FIX: the "no tasks" check ran inside the loop right after the
    # increment (where index is always >= 2), so it could never fire; it
    # must run once, after the loop.
    if index == 1:
        print("***没有任务需要执行***")
    return scheduler
def cli(ctx, helium_key, darksky_key, lat, lon, sensor, every):
    """Monitor weather for a lat/lon location.

    This sample service shows how you can use an external weather
    service to emit to a virtual sensor in the Helium platform.

    \b
    he-weather  --every <seconds> <sensor> <lat> <lon>

    The given virtual <sensor> is the id of a created Helium virtual
    sensor.

    The optional <seconds> parameter sets how often weather
    information needs to get fetched and posted to Helium. If the
    parameter is not provided a default (60 seconds) is picked.

    This will run the service based on the given lat/lon.

    """
    # Resolve the virtual sensor by id using the authenticated client.
    client = Client(api_token=helium_key)
    sensor = Sensor.find(client, sensor)

    logging.basicConfig()
    scheduler = BlockingScheduler()
    # Fetch immediately (next_run_time=now), then repeat every `every` seconds.
    scheduler.add_job(_process_weather,
                      "interval",
                      seconds=every,
                      next_run_time=datetime.now(),
                      args=[darksky_key, lat, lon, sensor])
    click.echo("Checking every {} seconds".format(every))
    scheduler.start()
Example #9
0
 def get_code(self):
     """Poll get_code_true every 300 seconds on an MST-timezone scheduler."""
     scheduler = BlockingScheduler(timezone='MST')
     # Interval trigger: refresh the code every five minutes.
     scheduler.add_job(self.get_code_true, 'interval', seconds=300)
     scheduler.start()
Example #10
0
 def time_send(self):
     """Read the scheduled send time from time.json and register a
     one-shot 'date' job that sends the chosen message at that time.
     """
     self.settime()
     try:
         # BUG FIX: the file handle was never closed; 'with' guarantees it.
         with open('time.json', 'r') as f:
             data = json.load(f)
     except OSError:
         # Narrowed from a bare 'except': only the file access is guarded
         # (json.load errors propagated in the original as well).
         QMessageBox.information(self, '提示', 'wrong')
         return
     set_time = data['time']
     parsed = time.strptime(set_time, '%Y-%m-%d %H:%M:%S')
     send_at = datetime(parsed[0], parsed[1], parsed[2],
                        parsed[3], parsed[4], parsed[5])
     if datetime.now() <= send_at:
         name = self.choose_obj()
         if name:
             scheduler = BlockingScheduler()
             scheduler.add_job(SentChatMsg, 'date', run_date=set_time,
                     kwargs={'username': self.get_username(name), "context": self.radio_checked()})
             QMessageBox.information(self, '提示框', "设置完成" + ":\n"
                            "待发送时间:" + set_time + "\n"
                            + "待发送到:" + name + "\n"
                            + "待发送内容:" + '\n' + self.radio_checked() + "\n"
                            "******************\n")
             scheduler.start()
         else:
             QMessageBox.information(self, '提示', 'Wrong')
     else:
         QMessageBox.warning(self, '提示', '时间设置错误,请重新设置!')
Example #11
0
 def task(self):
     """Register task_func on a cron trigger (every five minutes) and
     block until the scheduler stops.

     Kept from the original author: this helper is not meant to be run
     directly. The cron trigger accepts year/month/day/week/day_of_week/
     hour/minute/second plus start_date, end_date and timezone fields.
     :return:
     """
     cron_scheduler = BlockingScheduler()
     # Fire at every minute divisible by five.
     cron_scheduler.add_job(self.task_func, trigger='cron', minute='*/5')
     try:
         cron_scheduler.start()
     except Exception as e:
         # TODO: proper handling strategy for scheduler failures.
         Global.logger.error('定时任务错误:%s' % e)
         cron_scheduler.shutdown()
Example #12
0
 def init_scheduler(self, **kwargs: Any) -> None:
     """Create self.scheduler, blocking or background per self.blocking."""
     scheduler_cls = BlockingScheduler if self.blocking else BackgroundScheduler
     self.scheduler = scheduler_cls(jobstores=self.jobstores, **kwargs)
Example #13
0
def start_jukebox(music_folder=music_folder, db_path=db):
    """Start the jukebox: create/populate the database, open the first
    voting round, and schedule the first song.

    :param music_folder: string, path to music
    :param db_path: string, path to store database
    :return: None
    """

    db = 'sqlite:///{}'.format(db_path)

    engine = create_engine(db, echo=False)
    Session = sessionmaker(bind=engine)
    session = Session()

    # initiate database
    # BUG FIX: the existence check used a hard-coded '../db/dev.db' path,
    # so any non-default db_path never got its schema created.
    if not os.path.isfile(db_path):
        print('No existing database found, starting new session')
        Base.metadata.create_all(engine)

    populate(session, music_folder)

    round_end = setup_new_round(session, first_round=True)
    first_round = datetime.now() + timedelta(minutes=0, seconds=1)
    scheduler = BlockingScheduler()

    # first song starts after the first round of voting (1 minute)
    scheduler.add_job(play_next_song,
                      'date',
                      run_date=first_round,
                      args=[Session, scheduler, music_folder])
    print('Starting Jukebox')
    scheduler.start()

    return None
Example #14
0
def set_watchdog_on_wdk_models():
    """
    Method updating database with user defined Workflows.

    Starts a periodic watchdog job unless it is disabled by a
    non-positive WFE_WATCHDOG_INTERVAL or already running.

    :return: None
    """

    try:
        watchdog = Watchdog.load()
    except ProgrammingError:
        # raised in case of not existing models in db (e.g. on the python manage.py migrate)
        print("Watchdog singleton cannot be fetched from db.")
        return

    if not watchdog.running and WFE_WATCHDOG_INTERVAL > 0:
        # order deregister_watchdog() executions as exit function.
        atexit.register(deregister_watchdog)

        # mark watchdog as running
        watchdog.running = True
        watchdog.save()
        # schedule periodic watchdog's execution
        scheduler = BlockingScheduler(daemon=True)
        scheduler.add_job(update_wdk_models,
                          "interval",
                          seconds=WFE_WATCHDOG_INTERVAL)
        scheduler.start()
    elif WFE_WATCHDOG_INTERVAL <= 0:
        # BUG FIX: user-facing message read "turned of".
        print(
            f"Watchdog turned off by WFE_WATCHDOG_INTERVAL equal: {WFE_WATCHDOG_INTERVAL}"
        )
    elif watchdog.running:
        # Plain literal: the original used an f-string with no placeholders.
        print("Watchdog process already running.")
Example #15
0
def main():
    """Wire up the I2C worker thread, run the capture pipeline once, then
    keep re-running it on a blocking interval scheduler.
    """
    log = logging.getLogger("main")
    cameras, light, project = parse_configs()

    # Make sure the output directory exists before anything writes to it.
    storage_dir = Path(project["storage_dir"])
    if not storage_dir.is_dir():
        storage_dir.mkdir(parents=True)

    # Queue bridges the scheduler thread and the I2C communication thread.
    i2c_queue = Queue()

    log.info("Starting I2C thread")
    driver = I2CDriver(0x04)
    worker = Thread(target=i2c_thread_function, args=(driver, i2c_queue))
    worker.start()

    log.info("Running pipeline for the first time")
    executor = PipelineExecutor(storage_dir, cameras, light,
                                i2c_queue, project["pipeline"])
    # Execute once up front, then hand off the repeats to the scheduler.
    executor.execute()

    log.info("Scheduling the pipeline")
    scheduler = BlockingScheduler()
    scheduler.add_job(
        func=(lambda ex=executor: ex.execute()),
        trigger="interval",
        seconds=project['run_interval_seconds'])
    # Shut the scheduler down cleanly on interpreter exit.
    atexit.register(lambda: scheduler.shutdown())
    scheduler.start()  # blocks this thread
def run():
    """Connect the trading client and schedule the joinquant follower
    sell/buy jobs on weekday mornings, then block on the scheduler.
    """
    executors = {
        'default': {
            'type': 'threadpool',
            'max_workers': 10
        },
        'processpool': ProcessPoolExecutor(max_workers=5)
    }
    scheduler = BlockingScheduler()
    scheduler.configure(executors=executors)

    client = api.use('ths', debug=False)
    # NOTE(review): this raw string contains literal doubled backslashes
    # (r"\\" stays as two characters) — confirm the client expects that
    # exact path form.
    client.connect(r"c:\\workspace\\同花顺\\\\xiadan.exe", timeout=5)
    client.enable_type_keys_for_editor()
    # Sell job fires at 09:27, buy job at 09:31, weekdays only.
    scheduler.add_job(join_quant_follower_sell,
                      'cron',
                      day_of_week='mon-fri',
                      hour=9,
                      minute=27,
                      args=[client])
    scheduler.add_job(join_quant_follower_buy,
                      'cron',
                      day_of_week='mon-fri',
                      hour=9,
                      minute=31,
                      args=[client])
    # join_quant_follower_sell(client,session)
    # join_quant_follower_buy(client,session)
    try:
        scheduler.start()
    except (KeyboardInterrupt, SystemExit):
        # On interrupt, drop pending jobs instead of leaving them queued.
        scheduler.remove_all_jobs()
Example #17
0
    def __init__(self, top_data_dir, index_file, dir_files_to_parse,
                 files_to_parse, job_func, destination):
        """Build the replayer: load the xferlog index and set a reference
        start time a few seconds in the future.

        :param top_data_dir: root directory of the data set to index.
        :param index_file: index file loaded via Indexer.load_index.
        :param dir_files_to_parse: directory containing the files to parse.
        :param files_to_parse: the specific files to parse.
        :param job_func: callable executed for each scheduled job.
        :param destination: destination info (depends on the type of job).
        :return:
        """
        self._parser = eumetsat.dmon.parsers.xferlog_parser.XferlogParser(
            no_gems_header=True)
        self._dir_files = dir_files_to_parse
        self._files = files_to_parse
        self._job_func = job_func
        self._scheduler = BlockingScheduler()

        res = []
        # ftimer runs Indexer.load_index, appending its result into res and
        # returning the elapsed time.
        t = ftimer(Indexer.load_index, [top_data_dir, index_file], {}, res)
        print("Read index in %d seconds." % (t))
        self._index = res[0]

        #can now set reference time
        #ref time = now time plus one minute
        # NOTE(review): _defer_time is 5 seconds although the comment above
        # says one minute — confirm which is intended.
        self._defer_time = 5
        self._reference_date = datetime.datetime.now() + datetime.timedelta(
            seconds=self._defer_time)

        #destination info (depends on the type of job)
        self._destination = destination
Example #18
0
def main(config):
    """
    Setup logging, start the job scheduler and serve prometheus metrics.

    :param config: mapping with 'downloadURL' and 'icmpDestHost' entries.
    """

    # BUG FIX: the log message read 'Sarting'.
    LOGGER.info('Starting application at http://localhost:8000')

    executors = {
        'default': ThreadPoolExecutor(20),
    }
    job_defaults = {
        'coalesce': False,
        'max_instances': 3
    }

    scheduler = BlockingScheduler(
        executors=executors, job_defaults=job_defaults, timezone=utc)

    # Download-speed probe every 10 minutes; latency probe every minute.
    scheduler.add_job(download_speed, 'interval', seconds=600,
                      args=[config['downloadURL']], id='download_speed')
    scheduler.add_job(latency, 'interval', seconds=60,
                      args=[config['icmpDestHost']], id='ping')

    # start prometheus server to serve /metrics and /describe endpoints
    start_http_server(8000)
    scheduler.start()
Example #19
0
def main():
    """Log in to twitter, then poll feeds, weather and quotation on
    their own schedules, keeping the main thread alive afterwards.
    """
    global databaseFile
    databaseFile = '.database'
    loadDatabase()
    twitterAuth()

    # A single scheduler drives every periodic task.
    schd = BlockingScheduler()
    # Check each twitter profile for new tweets every five minutes; add
    # more profiles to this tuple to follow them as well.
    for profile in ('rmtcgoiania', 'jornalopcao'):
        schd.add_job(checkTwitter, 'interval', minutes=5, args=[profile])
    # Weather report twice a day, at 06:00 and 16:00.
    schd.add_job(checkWeather, 'cron', hour='6,16', minute=0)
    # Quotation report at 08:00 and 14:00 on weekdays only.
    schd.add_job(checkQuotation, 'cron',
                 day_of_week='mon-fri', hour='8,14', minute=0)
    schd.start()

    # Keeping the main thread alive
    while True:
        time.sleep(300)
Example #20
0
    def start(self):
        """Schedule the daily image purge at 05:00 UTC and block."""
        self.scheduler = BlockingScheduler(timezone=utc)
        # Cron trigger: once a day, 05:00.
        self.scheduler.add_job(self._purge_images, 'cron',
                               hour='5', minute='0')
        self.scheduler.start()
Example #21
0
def init():
    """Create the global price database and record prices every two minutes."""
    global DB
    DB = price_db.priceDB()
    sched = BlockingScheduler()
    # Cron fires on even minutes; coalesce collapses missed runs and the
    # 10-second grace period tolerates small scheduling delays.
    sched.add_job(writeToDB, 'cron', minute='*/2',
                  misfire_grace_time=10, coalesce=True)
    sched.start()
Example #22
0
def start_scheduler():
    """Start the job scheduler"""
    application = get_or_create()
    scheduler = BlockingScheduler(daemon=True)
    # Sweep for stuck processing data files once an hour.
    scheduler.add_job(mark_old_processing_data_files_as_failed,
                      'interval',
                      minutes=60,
                      kwargs={'app': application})
    scheduler.start()
 def __init__(self, main_config):
     """Prepare directories, logging and the scheduler, then run setup."""
     self.main_config = main_config
     self.create_dirs()
     cfg = main_config
     self.logger = get_logger(cfg['project_name'],
                              file=cfg['logs_dir'],
                              level=cfg['log_level'])
     self.board = None
     self.scheduler = BlockingScheduler()
     self.setup()
     # Guarantee cleanup runs when the interpreter exits.
     atexit.register(self._exit)
Example #24
0
def main():
    """Log in to itchat and send the file at the fixed trigger date."""
    itchat.auto_login()
    scheduler = BlockingScheduler()
    # One-shot date trigger: the job fires exactly once.
    when = DateTrigger(run_date='2019-05-10 15:25:30')
    job = scheduler.add_job(send_file_by_time, when)
    scheduler.start()
    # Only reached once the blocking scheduler has stopped.
    job.remove()
Example #25
0
def main():
    """Fire miaosha_kz at 19:00:01 every seventh day (CST timezone)."""
    scheduler = BlockingScheduler()
    scheduler.add_job(miaosha_kz, 'cron',
                      day="*/7", hour='19', minute="0", second='1',
                      timezone=kzconfig.cst_tz)
    scheduler.start()
Example #26
0
    def run(self):
        """Parse the feed every self.sleep seconds on a cron trigger."""
        defaults = {'coalesce': True, 'max_instances': 1}
        self.blocker = BlockingScheduler(job_defaults=defaults)
        # Fire on every second divisible by self.sleep.
        self.blocker.add_job(self.parse_feed, 'cron',
                             second=f'*/{self.sleep}',
                             id='parse_feed')
        self.blocker.start()
Example #27
0
 def cmd_start(self):
     """Load every stored job into a blocking scheduler and start it."""
     # BUG FIX: BlockingScheduler lives in apscheduler.schedulers.blocking;
     # the .background module only defines BackgroundScheduler, so the
     # original import raised ImportError.
     from apscheduler.schedulers.blocking import BlockingScheduler
     sched = BlockingScheduler()
     with transaction.manager:
         Scheduler.add_all_to_apscheduler(sched,
                                          DbSession,
                                          user=SYSTEM_UID,
                                          begin_transaction=True)
     sched.start()
     # NOTE: only reached after the blocking scheduler stops.
     sched.print_jobs()
 def __init__(self, reporter: ResultReporter):
     """Create the event scheduler and wire up reporting and logging."""
     self.reporter = reporter
     self.scheduler = BlockingScheduler()
     self.events = []
     # Dedicated log file under the configured CaseRunner log path.
     log_dir = static_setting.settings["CaseRunner"].log_path
     self.log = logger.register(
         "EventScheduler",
         filename=os.path.join(log_dir, "event_scheduler_log.log"),
         for_test=True)
     # Observe successful job executions.
     self.scheduler.add_listener(self._event_listen, EVENT_JOB_EXECUTED)
Example #29
0
 def _run(self):
     """Run the periodic print job every 3 seconds until interrupted."""
     scheduler = BlockingScheduler()
     scheduler.add_job(PrintText().start, 'interval', seconds=3)
     try:
         scheduler.start()
     # BUG FIX: 'except KeyboardInterrupt, SystemExit:' is Python 2 syntax
     # (and even there binds SystemExit as the variable instead of
     # catching it); a tuple catches both exceptions.
     except (KeyboardInterrupt, SystemExit):
         scheduler.shutdown()
         logger.error('Exit The Job!')
Example #30
0
def main(cfgfile, savefile='dev_info.yaml') -> None:
    """Run dcheck on the configured interval, writing results to savefile.

    :param cfgfile: configuration file handed to Config.
    :param savefile: output file for device info.
    """
    cfg = Config(cfg=cfgfile)
    scheduler = BlockingScheduler()
    # Interval length comes from the loaded configuration.
    scheduler.add_job(dcheck, 'interval',
                      args=[cfg, savefile],
                      minutes=cfg.interval)
    scheduler.start()