import time

from apscheduler.schedulers.background import BackgroundScheduler


def backup_schedule():
    # Schedule the `test` job (assumed to be defined elsewhere) on a cron
    # trigger that fires every 2 seconds across all minutes, hours, days,
    # weeks, and months.
    schedule = BackgroundScheduler()
    schedule.start()
    schedule.add_job(test, 'cron', second='*/2', minute='*', hour='*',
                     month='*', day='*', week='*')
    # Keep the main thread alive so the background scheduler can run;
    # sleeping avoids the busy-wait of `while True: pass`, which pins a core.
    while True:
        time.sleep(1)
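# A minimal sketch of how backup_schedule might be exercised. `test` is not
# defined in this snippet, so a hypothetical stand-in job is used here; the
# real job body is assumed to do the actual backup work.
def test():
    print('running backup job')  # placeholder for the assumed backup logic


if __name__ == '__main__':
    backup_schedule()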
import getopt
import sys


def get_args():
    # Parse command-line options: -h (help), -o <outfile>, -s <start date>,
    # -e <end date>; dates are given as year-month-day.
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'ho:s:e:')
    except getopt.GetoptError as err:
        print(err)
        sys.exit(2)
    status = True
    Sy = Sm = Sd = Ey = Em = Ed = None
    for opt, arg in opts:
        if opt == '-o':
            global filename
            filename = arg
        elif opt == '-s':
            Sy, Sm, Sd = arg.split('-')
        elif opt == '-e':
            Ey, Em, Ed = arg.split('-')
        elif opt == '-h':
            status = False
            print('-s'.ljust(10), 'start_time: start_year-start_month-start_day')
            print('-e'.ljust(10), 'end_time: end_year-end_month-end_day')
            print('-o'.ljust(10), 'filename: outfilename.txt')
            print('For example: python ISC -s 2005-1-1 -e 2016-1-1 -o filename.txt')
    keys = ['start_year', 'start_month', 'start_day',
            'end_year', 'end_month', 'end_day']
    values = [Sy, Sm, Sd, Ey, Em, Ed]
    # Only populate the query when both -s and -e were supplied, instead of
    # silently swallowing a NameError with a bare except.
    if all(v is not None for v in values):
        for key, value in zip(keys, values):
            query[key] = value
    if status:
        start(query, filename)
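# get_args relies on module-level names that this snippet does not define:
# `query`, `filename`, and `start`. A hedged sketch of what those assumed
# names might look like, for illustration only:
query = {}
filename = 'result.txt'


def start(query, filename):
    # Hypothetical stand-in: the real start() presumably runs the ISC search
    # with the date range in `query` and writes results to `filename`.
    print('querying', query, '->', filename)

# Example invocation: python ISC -s 2005-1-1 -e 2016-1-1 -o filename.txt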
import logging

from apscheduler.schedulers.background import BackgroundScheduler
from flask import Flask
from flask_cors import CORS
from kafka import KafkaProducer
from kafka.errors import KafkaError

logger_format = '%(asctime)-15s %(message)s'
logging.basicConfig(format=logger_format)
logger = logging.getLogger('load-data-log')
logger.setLevel(logging.DEBUG)

app = Flask(__name__)
# app.config.from_envvar('ENV_CONFIG_FILE')
kafka_broker = '192.168.99.100:9092'
topic_name = 'stock-analyzer'
CORS(app)

producer = KafkaProducer(bootstrap_servers=kafka_broker)

schedule = BackgroundScheduler()
schedule.add_executor('threadpool')
schedule.start()

symbols = set()


def shutdown_hook(producer):
    # Flush any buffered messages (waiting up to 10 seconds), then close
    # the producer; log but do not re-raise failures during shutdown.
    try:
        producer.flush(10)
    except KafkaError:
        logger.warning('flushing error')
    finally:
        try:
            producer.close()
            logger.info('closed')
        except Exception:
            logger.warning('close fails')
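# The snippet defines shutdown_hook but never registers it. One plausible
# wiring (an assumption, not shown in the original) is to register it with
# atexit so the producer is flushed and closed when the process exits:
import atexit

atexit.register(shutdown_hook, producer)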
from apscheduler.schedulers.blocking import BlockingScheduler


def dojob():
    # BlockingScheduler runs in the foreground: start() blocks the calling
    # thread, so no keep-alive loop is needed after it.
    schedule = BlockingScheduler()
    schedule.add_job(func_stimr, 'interval', seconds=2, id='tt_job1')
    schedule.start()
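# `func_stimr` is referenced but not defined in this snippet. A hypothetical
# stand-in that just prints a timestamp, so the 2-second interval job has
# something to run:
import datetime


def func_stimr():
    print('tick', datetime.datetime.now())


if __name__ == '__main__':
    dojob()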